def get_content(self, cvs_rev):
    # Is EOL fixing requested?
    eol_fix = cvs_rev.get_property('_eol_fix') or None

    # How do we want keywords to be handled?
    keyword_handling = cvs_rev.get_property('_keyword_handling') or None

    try:
        (k_option, explicit_keyword_handling) = self._text_options[
            bool(eol_fix), keyword_handling]
    except KeyError:
        raise FatalError(
            'Undefined _keyword_handling property (%r) for %s' % (
                keyword_handling, cvs_rev))

    data = get_command_output(self.get_pipe_command(cvs_rev, k_option))

    if Ctx().decode_apple_single:
        # Insert a filter to decode any files that are in AppleSingle
        # format:
        data = get_maybe_apple_single(data)

    if explicit_keyword_handling == 'expanded':
        data = expand_keywords(data, cvs_rev)
    elif explicit_keyword_handling == 'collapsed':
        data = collapse_keywords(data)

    if eol_fix:
        data = canonicalize_eol(data, eol_fix)

    return data
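

# The lookup in the TRY block above assumes a class-level table along these
# lines.  The exact contents here are an illustrative sketch (the real table
# is the _text_options attribute of the enclosing RevisionReader class): it
# maps (EOL fix requested?, '_keyword_handling' value) to the '-k' option
# passed to the checkout command, plus any keyword handling that still has
# to be applied to the checked-out text afterwards.
_EXAMPLE_TEXT_OPTIONS = {
    # (eol_fix?, keyword_handling): (k_option, explicit_keyword_handling)
    (False, 'collapsed'): (['-kk'], None),      # checkout collapses keywords
    (False, 'expanded'): (['-kkv'], None),      # checkout expands keywords
    (False, 'untouched'): (['-ko'], None),      # keep keywords as committed
    (True, 'collapsed'): (['-kk'], None),
    (True, 'expanded'): (['-kk'], 'expanded'),  # expand in-process after checkout
    (True, 'untouched'): (['-ko'], None),
    }

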
class InternalRevisionReader(RevisionReader):
    """A RevisionReader that reads the contents from an own delta store."""
    def __init__(self, compress):
        # Only import Database if an InternalRevisionReader is really
        # instantiated, because the import fails if a decent dbm is not
        # installed.
        from cvs2svn_lib.database import Database
        self._Database = Database

        self._compress = compress

    def register_artifacts(self, which_pass):
        artifact_manager.register_temp_file(config.CVS_CHECKOUT_DB, which_pass)
        artifact_manager.register_temp_file_needed(
            config.RCS_DELTAS_STORE, which_pass)
        artifact_manager.register_temp_file_needed(
            config.RCS_DELTAS_INDEX_TABLE, which_pass)
        artifact_manager.register_temp_file_needed(
            config.RCS_TREES_STORE, which_pass)
        artifact_manager.register_temp_file_needed(
            config.RCS_TREES_INDEX_TABLE, which_pass)

    def start(self):
        self._delta_db = IndexedDatabase(
            artifact_manager.get_temp_file(config.RCS_DELTAS_STORE),
            artifact_manager.get_temp_file(config.RCS_DELTAS_INDEX_TABLE),
            DB_OPEN_READ,
        )
        # Make attempts to delete entries from the read-only delta store a
        # silent no-op:
        self._delta_db.__delitem__ = lambda id: None
        self._tree_db = IndexedDatabase(
            artifact_manager.get_temp_file(config.RCS_TREES_STORE),
            artifact_manager.get_temp_file(config.RCS_TREES_INDEX_TABLE),
            DB_OPEN_READ,
        )
        serializer = MarshalSerializer()
        if self._compress:
            serializer = CompressingSerializer(serializer)
        self._co_db = self._Database(
            artifact_manager.get_temp_file(config.CVS_CHECKOUT_DB),
            DB_OPEN_NEW,
            serializer,
        )

        # The set of CVSFile instances whose TextRecords have already been
        # read:
        self._loaded_files = set()

        # A database mapping CVSRevision ids to TextRecord instances,
        # backed by the delta and checkout databases:
        self._text_record_db = TextRecordDatabase(self._delta_db, self._co_db)

    def _get_text_record(self, cvs_rev):
        """Return the TextRecord instance for CVS_REV.

        If the TextRecords for CVS_REV.cvs_file haven't been loaded yet,
        do so now."""

        if cvs_rev.cvs_file not in self._loaded_files:
            for text_record in self._tree_db[cvs_rev.cvs_file.id].itervalues():
                self._text_record_db.add(text_record)
            self._loaded_files.add(cvs_rev.cvs_file)

        return self._text_record_db[cvs_rev.id]

    def get_content(self, cvs_rev):
        """Check out the text for revision C_REV from the repository.

    Return the text.  If CVS_REV has a property _keyword_handling, use
    it to determine how to handle RCS keywords in the output:

        'collapsed' -- collapse keywords

        'expanded' -- expand keywords

        'untouched' -- output keywords in the form they are found in
            the RCS file

    Note that $Log$ never actually generates a log (which makes test
    'requires_cvs()' fail).

    Revisions may be requested in any order, but if they are not
    requested in dependency order the checkout database will become
    very large.  Revisions may be skipped.  Each revision may be
    requested only once."""

        try:
            text = self._get_text_record(cvs_rev).checkout(
                self._text_record_db)
        except MalformedDeltaException, (msg):
            raise FatalError('Malformed RCS delta in %s, revision %s: %s' %
                             (cvs_rev.cvs_file.rcs_path, cvs_rev.rev, msg))

        keyword_handling = cvs_rev.get_property('_keyword_handling')

        if keyword_handling == 'untouched':
            # Leave keywords in the form that they were checked in.
            pass
        elif keyword_handling == 'collapsed':
            text = collapse_keywords(text)
        elif keyword_handling == 'expanded':
            text = expand_keywords(text, cvs_rev)
        else:
            raise FatalError(
                'Undefined _keyword_handling property (%r) for %s' % (
                    keyword_handling,
                    cvs_rev,
                ))

        if Ctx().decode_apple_single:
            # Insert a filter to decode any files that are in AppleSingle
            # format:
            text = get_maybe_apple_single(text)

        eol_fix = cvs_rev.get_property('_eol_fix')
        if eol_fix:
            text = canonicalize_eol(text, eol_fix)

        return text
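

# A minimal driving sketch (assumed usage, not code from this module): the
# reader's temporary files are registered while the conversion pass is set
# up, start() opens the delta/tree/checkout databases, and get_content() is
# then called at most once per CVSRevision, ideally in dependency order (as
# the docstring above notes) so that the checkout database stays small.
def example_drive_reader(which_pass, cvs_revisions):
    reader = InternalRevisionReader(compress=True)
    reader.register_artifacts(which_pass)
    # ... earlier passes run here and create the registered temp files ...
    reader.start()
    for cvs_rev in cvs_revisions:  # hypothetical iterable of CVSRevision objects
        text = reader.get_content(cvs_rev)
        # ... hand TEXT off to whatever writes the target repository ...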