def _merge_duplicates(self, setting):
    """Merge duplicate entries in the given setting value.

    Only lists are deduplicated. Any other type (string, tuple, None,
    bool, int, ...) is returned unchanged so that e.g. repeated
    characters in a string are not treated as "duplicates".

    If deduplication leaves exactly one item, that item is returned
    unwrapped rather than as a one-element list.
    """
    # Duplicates can be removed only from lists. This prevents removal of
    # "duplicates" from strings or tuples. It also returns any non-list
    # value (including None and False) unchanged, so no separate None
    # check is needed below.
    if not isinstance(setting, list):
        return setting

    # Do NOT use `if not setting:` because False can be a valid value for
    # some settings. (The original also tested `setting is None` here, but
    # that branch was unreachable: None fails the isinstance() guard above.)
    if setting == []:
        return setting

    no_duplicates = merge_duplicates(as_list(setting))
    # A single remaining value is unwrapped from its enclosing list.
    return no_duplicates if len(no_duplicates) != 1 else no_duplicates[0]
# Get commits to be processed. We simply extend the list and merge any # duplicates afterwards. # # (1) Not-yet-processed commits that are already in the database. commits.extend(db.get_unprocessed_commits()) # (2) New commits in the repository. max_initial_commits = int(config['daemon']['max_initial_commits']) last_commit = db.get_topmost_commit() if last_commit is not None: commits.extend(repo.get_commits_since(last_commit)) else: commits.extend(repo.get_last_commits(max_initial_commits)) # We have to merge duplicates (if any) because the previous actions may # have introduced duplicate commits. commits = merge_duplicates(commits) # We need at least one commit to continue. if not commits: time.sleep(wait_time) continue # Insert the commits into the database so that we can then process them # in any order. If a commit is already in the database, it is # automatically skipped. db.insert_commits(commits) # Process just the newest commit and then check whether there are newer # commits. This is done so that the newest commits are processed first # to get the most meaningful feedback as soon as possible. commit = commits.pop()
def test_returns_same_list_when_no_duplicates(self):
    """A list that contains no duplicates is returned unchanged."""
    input_list = [1, 2, 3]
    result = merge_duplicates(input_list)
    self.assertEqual(result, [1, 2, 3])
def test_correctly_merges_duplicates_while_keeping_first_occurrences(self):
    """Deduplication keeps each value's first occurrence, in order."""
    with_duplicates = [1, 2, 1, 1, 3, 2, 3]
    deduplicated = merge_duplicates(with_duplicates)
    self.assertEqual(deduplicated, [1, 2, 3])
def test_returns_empty_list_for_empty_list(self):
    """An empty input list produces an empty output list."""
    result = merge_duplicates([])
    self.assertEqual(result, [])