def test_deprecated_passed(self):
    """deprecated_passed() distinguishes the sentinel from real values."""
    # Any ordinary argument -- even None or False -- counts as "passed".
    for actually_passed in (None, True, False):
        self.assertEqual(
            True, symbol_versioning.deprecated_passed(actually_passed))
    # Only the DEPRECATED_PARAMETER sentinel itself means "not passed".
    self.assertEqual(
        False,
        symbol_versioning.deprecated_passed(
            symbol_versioning.DEPRECATED_PARAMETER))
def test_deprecated_passed(self):
    """deprecated_passed() is False only for the DEPRECATED_PARAMETER sentinel."""
    deprecated_passed = symbol_versioning.deprecated_passed
    self.assertEqual(True, deprecated_passed(None))
    self.assertEqual(True, deprecated_passed(True))
    self.assertEqual(True, deprecated_passed(False))
    self.assertEqual(
        False, deprecated_passed(symbol_versioning.DEPRECATED_PARAMETER))
def search_missing_revision_ids( self, revision_id=symbol_versioning.DEPRECATED_PARAMETER, find_ghosts=True, revision_ids=None, if_present_ids=None, limit=None): """See InterRepository.search_missing_revision_ids().""" # we want all revisions to satisfy revision_id in source. # but we don't want to stat every file here and there. # we want then, all revisions other needs to satisfy revision_id # checked, but not those that we have locally. # so the first thing is to get a subset of the revisions to # satisfy revision_id in source, and then eliminate those that # we do already have. # this is slow on high latency connection to self, but as this # disk format scales terribly for push anyway due to rewriting # inventory.weave, this is considered acceptable. # - RBC 20060209 if symbol_versioning.deprecated_passed(revision_id): symbol_versioning.warn( 'search_missing_revision_ids(revision_id=...) was ' 'deprecated in 2.4. Use revision_ids=[...] instead.', DeprecationWarning, stacklevel=2) if revision_ids is not None: raise AssertionError( 'revision_ids is mutually exclusive with revision_id') if revision_id is not None: revision_ids = [revision_id] del revision_id source_ids_set = self._present_source_revisions_for( revision_ids, if_present_ids) # source_ids is the worst possible case we may need to pull. # now we want to filter source_ids against what we actually # have in target, but don't try to check for existence where we know # we do not have a revision as that would be pointless. target_ids = set(self.target._all_possible_ids()) possibly_present_revisions = target_ids.intersection(source_ids_set) actually_present_revisions = set( self.target._eliminate_revisions_not_present( possibly_present_revisions)) required_revisions = source_ids_set.difference( actually_present_revisions) if revision_ids is not None: # we used get_ancestry to determine source_ids then we are assured all # revisions referenced are present as they are installed in topological order. 
# and the tip revision was validated by get_ancestry. result_set = required_revisions else: # if we just grabbed the possibly available ids, then # we only have an estimate of whats available and need to validate # that against the revision records. result_set = set( self.source._eliminate_revisions_not_present( required_revisions)) if limit is not None: topo_ordered = self.get_graph().iter_topo_order(result_set) result_set = set(itertools.islice(topo_ordered, limit)) return self.source.revision_ids_to_search_result(result_set)
def find_unmerged(local_branch, remote_branch, restrict='all',
                  include_merged=None, backward=False,
                  local_revid_range=None, remote_revid_range=None,
                  include_merges=symbol_versioning.DEPRECATED_PARAMETER):
    """Find revisions from each side that have not been merged.

    :param local_branch: Compare the history of local_branch
    :param remote_branch: versus the history of remote_branch, and determine
        mainline revisions which have not been merged.
    :param restrict: ('all', 'local', 'remote') If 'all', we will return the
        unique revisions from both sides. If 'local', we will return None for
        the remote revisions, similarly if 'remote' we will return None for
        the local revisions.
    :param include_merged: Show mainline revisions only if False, all
        revisions otherwise.
    :param backward: Show oldest versions first when True, newest versions
        first when False.
    :param local_revid_range: Revision-id range for filtering local_branch
        revisions (lower bound, upper bound)
    :param remote_revid_range: Revision-id range for filtering remote_branch
        revisions (lower bound, upper bound)
    :param include_merges: Deprecated historical alias for include_merged
    :return: A list of [(revno, revision_id)] for the mainline revisions on
        each side.
    """
    # Honour the deprecated alias only when the caller did not also give
    # the modern parameter.
    if symbol_versioning.deprecated_passed(include_merges):
        symbol_versioning.warn(
            'include_merges was deprecated in 2.5.'
            ' Use include_merged instead.',
            DeprecationWarning, stacklevel=2)
        if include_merged is None:
            include_merged = include_merges
    if include_merged is None:
        include_merged = False
    kwargs = dict(
        restrict=restrict, include_merged=include_merged, backward=backward,
        local_revid_range=local_revid_range,
        remote_revid_range=remote_revid_range)
    # Lock local first, remote second; release in strict reverse order.
    local_branch.lock_read()
    try:
        remote_branch.lock_read()
        try:
            return _find_unmerged(local_branch, remote_branch, **kwargs)
        finally:
            remote_branch.unlock()
    finally:
        local_branch.unlock()
def search_missing_revision_ids(self,
        revision_id=symbol_versioning.DEPRECATED_PARAMETER,
        find_ghosts=True, revision_ids=None, if_present_ids=None,
        limit=None):
    """See InterRepository.search_missing_revision_ids().

    Strategy: compute the candidate set from the source first, then
    subtract what the target already has.  Double-checking existence is
    slow over a high-latency connection, but this disk format scales
    terribly for push anyway (inventory.weave rewriting), so the cost is
    considered acceptable. - RBC 20060209
    """
    if symbol_versioning.deprecated_passed(revision_id):
        symbol_versioning.warn(
            'search_missing_revision_ids(revision_id=...) was '
            'deprecated in 2.4. Use revision_ids=[...] instead.',
            DeprecationWarning, stacklevel=2)
        if revision_ids is not None:
            raise AssertionError(
                'revision_ids is mutually exclusive with revision_id')
        if revision_id is not None:
            revision_ids = [revision_id]
        del revision_id
    present_in_source = self._present_source_revisions_for(
        revision_ids, if_present_ids)
    # present_in_source is the worst case we may need to pull.  Filter it
    # against the target, but only stat ids that could plausibly already
    # be there -- checking known-absent revisions would be pointless.
    candidate_in_target = set(
        self.target._all_possible_ids()).intersection(present_in_source)
    known_in_target = set(
        self.target._eliminate_revisions_not_present(candidate_in_target))
    missing = present_in_source.difference(known_in_target)
    if revision_ids is None:
        # We only have an estimate of what is available; validate it
        # against the actual revision records.
        result_set = set(
            self.source._eliminate_revisions_not_present(missing))
    else:
        # get_ancestry determined these ids, so every referenced revision
        # is present (installed in topological order) and the tip was
        # already validated -- no further check needed.
        result_set = missing
    if limit is not None:
        result_set = set(itertools.islice(
            self.get_graph().iter_topo_order(result_set), limit))
    return self.source.revision_ids_to_search_result(result_set)
def find_unmerged(local_branch, remote_branch, restrict='all',
                  include_merged=None, backward=False,
                  local_revid_range=None, remote_revid_range=None,
                  include_merges=symbol_versioning.DEPRECATED_PARAMETER):
    """Find revisions from each side that have not been merged.

    :param local_branch: Compare the history of local_branch
    :param remote_branch: versus the history of remote_branch, and determine
        mainline revisions which have not been merged.
    :param restrict: ('all', 'local', 'remote') If 'all', we will return the
        unique revisions from both sides. If 'local', we will return None for
        the remote revisions, similarly if 'remote' we will return None for
        the local revisions.
    :param include_merged: Show mainline revisions only if False, all
        revisions otherwise.
    :param backward: Show oldest versions first when True, newest versions
        first when False.
    :param local_revid_range: Revision-id range for filtering local_branch
        revisions (lower bound, upper bound)
    :param remote_revid_range: Revision-id range for filtering remote_branch
        revisions (lower bound, upper bound)
    :param include_merges: Deprecated historical alias for include_merged
    :return: A list of [(revno, revision_id)] for the mainline revisions on
        each side.
    """
    if symbol_versioning.deprecated_passed(include_merges):
        symbol_versioning.warn(
            'include_merges was deprecated in 2.5.'
            ' Use include_merged instead.',
            DeprecationWarning, stacklevel=2)
        # The alias only applies when include_merged was not given.
        include_merged = (include_merges if include_merged is None
                          else include_merged)
    if include_merged is None:
        # Neither spelling supplied: default to mainline-only.
        include_merged = False
    # Read-lock both branches for the duration of the comparison,
    # unlocking in the reverse order of acquisition.
    local_branch.lock_read()
    try:
        remote_branch.lock_read()
        try:
            return _find_unmerged(
                local_branch, remote_branch, restrict=restrict,
                include_merged=include_merged, backward=backward,
                local_revid_range=local_revid_range,
                remote_revid_range=remote_revid_range)
        finally:
            remote_branch.unlock()
    finally:
        local_branch.unlock()