def test_integrated_hgrepo_apply_patch_newline_bug(hg_clone):
    """Test newline bug in Mercurial

    See https://bugzilla.mozilla.org/show_bug.cgi?id=1541181 for context. This
    test will be skipped if not using version 5.1 of Mercurial.
    """
    # TODO: update test if version of Mercurial is changed.
    repo = HgRepo(hg_clone.strpath)
    with repo, hg_clone.as_cwd():
        version_line = repo.run_hg(["version"]).split(b"\n")[0]
        if version_line != b"Mercurial Distributed SCM (version 5.1)":
            pytest.skip("Test not relevant for this version of mercurial")

        # Create a file without a new line and with a trailing `\r`.
        # Note that to reproduce this bug, this file needs to already exist
        # in the repo and not be imported in a patch.
        crlf_file = hg_clone.join("test-file")
        crlf_file.write(b"hello\r", mode="wb")
        repo.run_hg_cmds(
            [
                ["add", crlf_file.strpath],
                ["commit", "-m", "adding file"],
                ["push"],
            ]
        )

        repo.apply_patch(io.BytesIO(PATCH_DELETE_NO_NEWLINE_FILE))
        # Applying the delete patch should have produced an outgoing commit.
        assert "file removed" in str(repo.run_hg(["outgoing"]))
def start(self):
    """Run the landing worker's main loop until ``self.running`` is cleared.

    Repeatedly polls the database for the next landing job on an enabled
    repository and processes it via ``run_job``. Sleeps between polls when
    no job is available or when the previous job did not complete.
    """
    logger.info("Landing worker starting")
    logger.info(
        f"{len(self.applicable_repos)} applicable repos: {self.applicable_repos}"
    )

    if self.ssh_private_key:
        self._setup_ssh(self.ssh_private_key)

    self.running = True

    # Initialize state
    self.refresh_enabled_repos()
    last_job_finished = True

    while self.running:
        # Check if any closed trees reopened since the beginning of this iteration
        if len(self.enabled_repos) != len(self.applicable_repos):
            self.refresh_enabled_repos()

        if not last_job_finished:
            # Back off before retrying, and re-check repo enablement in case
            # tree status changed while the failed job was running.
            logger.info(
                "Last job did not complete, waiting for {} seconds".format(
                    self.sleep_seconds
                )
            )
            time.sleep(self.sleep_seconds)
            self.refresh_enabled_repos()

        job = LandingJob.next_job_for_update_query(
            repositories=self.enabled_repos
        ).first()

        if job is None:
            # Nothing to do; poll again after a delay.
            time.sleep(self.sleep_seconds)
            continue

        with job_processing(self, job, db):
            job.status = LandingJobStatus.IN_PROGRESS
            job.attempts += 1

            # Make sure the status and attempt count are updated in the database
            db.session.commit()

            repo = repo_clone_subsystem.repos[job.repository_name]
            hgrepo = HgRepo(
                str(repo_clone_subsystem.repo_paths[job.repository_name]),
                config=repo.config_override,
            )

            logger.info("Starting landing job", extra={"id": job.id})
            # run_job returns a truthy value when the job ran to completion;
            # a falsy result triggers the back-off branch above next iteration.
            last_job_finished = self.run_job(
                job,
                repo,
                hgrepo,
                treestatus_subsystem.client,
                current_app.config["PATCH_BUCKET_NAME"],
            )
            logger.info("Finished processing landing job", extra={"id": job.id})

    logger.info("Landing worker exited")
def test_lose_push_race(
    app, db, s3, mock_repo_config, hg_server, hg_clone, treestatusdouble, upload_patch
):
    """Losing the push race should leave the job in the DEFERRED state."""
    treestatus = treestatusdouble.get_treestatus_client()
    treestatusdouble.open_tree("mozilla-central")

    repo = Repo(
        tree="mozilla-central",
        url=hg_server,
        access_group=SCM_LEVEL_3,
        push_path=hg_server,
        pull_path=hg_server,
    )
    hgrepo = HgRepo(hg_clone.strpath)

    # This patch deliberately causes the push to lose the race.
    upload_patch(1, patch=PATCH_PUSH_LOSER)

    job = LandingJob(
        id=1234,
        status=LandingJobStatus.IN_PROGRESS,
        requester_email="*****@*****.**",
        repository_name="mozilla-central",
        revision_to_diff_id={"1": 1},
        revision_order=["1"],
        attempts=1,
    )

    worker = LandingWorker(sleep_seconds=0)
    job_succeeded = worker.run_job(
        job, repo, hgrepo, treestatus, "landoapi.test.bucket"
    )

    assert not job_succeeded
    assert job.status is LandingJobStatus.DEFERRED
def test_integrated_execute_job(
    app, db, s3, mock_repo_config, hg_server, hg_clone, treestatusdouble, upload_patch
):
    """A two-patch landing job should land and record a 40-char commit id."""
    treestatus = treestatusdouble.get_treestatus_client()
    treestatusdouble.open_tree("mozilla-central")

    repo = Repo(
        tree="mozilla-central",
        url=hg_server,
        access_group=SCM_LEVEL_3,
        push_path=hg_server,
        pull_path=hg_server,
        legacy_transplant=False,
    )
    hgrepo = HgRepo(hg_clone.strpath)

    for patch_number in (1, 2):
        upload_patch(patch_number)

    job = LandingJob(
        status=LandingJobStatus.IN_PROGRESS,
        requester_email="*****@*****.**",
        repository_name="mozilla-central",
        revision_to_diff_id={"1": 1, "2": 2},
        revision_order=["1", "2"],
        attempts=1,
    )

    worker = LandingWorker(sleep_seconds=0.01)

    assert worker.run_job(job, repo, hgrepo, treestatus, "landoapi.test.bucket")
    assert job.status is LandingJobStatus.LANDED
    # A full hg SHA-1 changeset hash is 40 hex characters.
    assert len(job.landed_commit_id) == 40
def test_failed_landing_job_notification(
    app,
    db,
    s3,
    mock_repo_config,
    hg_server,
    hg_clone,
    treestatusdouble,
    monkeypatch,
    upload_patch,
):
    """Ensure that a failed landing triggers a user notification."""
    treestatus = treestatusdouble.get_treestatus_client()
    treestatusdouble.open_tree("mozilla-central")

    repo = Repo(
        "mozilla-central", SCM_LEVEL_3, "", hg_server, hg_server, True, hg_server, False
    )
    hgrepo = HgRepo(hg_clone.strpath)

    for patch_number in (1, 2):
        upload_patch(patch_number)

    job = LandingJob(
        status=LandingJobStatus.IN_PROGRESS,
        requester_email="*****@*****.**",
        repository_name="mozilla-central",
        revision_to_diff_id={"1": 1, "2": 2},
        revision_order=["1", "2"],
        attempts=1,
    )

    worker = LandingWorker(sleep_seconds=0.01)

    # Mock `hgrepo.update_repo` so we can force a failed landing.
    mock_update_repo = mock.MagicMock()
    mock_update_repo.side_effect = Exception("Forcing a failed landing")
    monkeypatch.setattr(hgrepo, "update_repo", mock_update_repo)

    # Mock `notify_user_of_landing_failure` so we can make sure that it was called.
    mock_notify = mock.MagicMock()
    monkeypatch.setattr(
        "landoapi.landing_worker.notify_user_of_landing_failure", mock_notify
    )

    assert worker.run_job(job, repo, hgrepo, treestatus, "landoapi.test.bucket")
    assert job.status is LandingJobStatus.FAILED
    assert mock_notify.call_count == 1
def ready(self):
    """Validate configuration and prepare local clones of all landing repos.

    Reads ``REPO_CLONES_PATH`` and ``REPOS_TO_LAND`` from the Flask config,
    then clones (or pulls/updates) each configured repository under the
    clones directory, recording results in ``self.repos`` and
    ``self.repo_paths``.

    Returns:
        None when neither setting is configured (subsystem disabled),
        a ``str`` describing the problem when configuration is invalid,
        or ``True`` when every repository clone is ready.
    """
    clones_path = self.flask_app.config["REPO_CLONES_PATH"]
    repo_names = self.flask_app.config["REPOS_TO_LAND"]

    # Both settings absent means this subsystem is intentionally disabled.
    if not clones_path and not repo_names:
        return None

    clones_path = pathlib.Path(clones_path)
    if not clones_path.exists() or not clones_path.is_dir():
        return (
            "REPO_CLONES_PATH ({}) is not a valid path to an existing "
            "directory for holding repository clones.".format(clones_path)
        )

    # Parse the comma-separated repo list, dropping empty entries.
    repo_names = set(filter(None, (r.strip() for r in repo_names.split(","))))
    if not repo_names:
        # Fixed typo in user-facing message: "seperated" -> "separated".
        return (
            "REPOS_TO_LAND does not contain a valid comma separated list "
            "of repository names."
        )

    repos = get_repos_for_env(self.flask_app.config.get("ENVIRONMENT"))
    if not all(r in repos for r in repo_names):
        return "REPOS_TO_LAND contains unsupported repository names."

    self.repos = {name: repos[name] for name in repo_names}
    self.repo_paths = {}

    # Imported here to avoid a module-level import cycle.
    from landoapi.hg import HgRepo

    for name, repo in ((name, repos[name]) for name in repo_names):
        path = clones_path.joinpath(name)
        r = HgRepo(str(path))

        if path.exists():
            logger.info("Repo exists, pulling.", extra={"repo": name})
            with r.for_pull():
                r.update_repo(repo.pull_path)
        else:
            logger.info("Cloning repo.", extra={"repo": name})
            r.clone(repo.pull_path)

        logger.info("Repo ready.", extra={"repo": name})
        self.repo_paths[name] = path

    return True
def test_integrated_execute_job(
    app, db, s3, mock_repo_config, hg_server, hg_clone, treestatusdouble
):
    """A two-patch landing job should land and record a 40-char commit id."""
    treestatus = treestatusdouble.get_treestatus_client()
    treestatusdouble.open_tree("mozilla-central")

    repo = Repo(
        "mozilla-central", SCM_LEVEL_3, "", hg_server, hg_server, True, hg_server, False
    )
    hgrepo = HgRepo(hg_clone.strpath)

    # Upload both patches to the (mocked) S3 bucket.
    for patch_id, patch in ((1, PATCH_NORMAL_1), (2, PATCH_NORMAL_2)):
        patches.upload(
            patch_id,
            patch_id,
            patch,
            "landoapi.test.bucket",
            aws_access_key=None,
            aws_secret_key=None,
        )

    job = LandingJob(
        status=LandingJobStatus.IN_PROGRESS,
        requester_email="*****@*****.**",
        repository_name="mozilla-central",
        revision_to_diff_id={"1": 1, "2": 2},
        revision_order=["1", "2"],
        attempts=1,
    )

    worker = LandingWorker(sleep_seconds=0.01)

    assert worker.run_job(job, repo, hgrepo, treestatus, "landoapi.test.bucket")
    assert job.status is LandingJobStatus.LANDED
    # A full hg SHA-1 changeset hash is 40 hex characters.
    assert len(job.landed_commit_id) == 40
def test_integrated_hgrepo_can_log(hg_clone):
    """`hg log` on a fresh clone should succeed and produce output."""
    hgrepo = HgRepo(hg_clone.strpath)
    with hgrepo:
        assert hgrepo.run_hg_cmds([["log"]])
def test_integrated_hgrepo_apply_patch(hg_clone):
    """Exercise apply_patch across failure and success cases."""
    repo = HgRepo(hg_clone.strpath)

    # We should refuse to apply patches that are missing a
    # Diff Start Line header.
    with pytest.raises(NoDiffStartLine), repo:
        repo.apply_patch(io.BytesIO(PATCH_WITHOUT_STARTLINE))

    # Patches with conflicts should raise a proper PatchConflict exception.
    with pytest.raises(PatchConflict), repo:
        repo.apply_patch(io.BytesIO(PATCH_WITH_CONFLICT))

    for patch in (PATCH_NORMAL, PATCH_UNICODE, PATCH_FAIL_HEADER):
        with repo:
            repo.apply_patch(io.BytesIO(patch))
            if patch is PATCH_UNICODE:
                # The unicode commit message must survive the round trip.
                log_output = repo.run_hg(["log"])
                assert "こんにちは" in log_output.decode("utf-8")
            # Commit created.
            assert repo.run_hg(["outgoing"])
def test_integrated_hgrepo_clean_repo(hg_clone):
    # Test is long and checks various repo cleaning cases as the startup
    # time for anything using `hg_clone` fixture is very long.
    repo = HgRepo(hg_clone.strpath)

    with repo, hg_clone.as_cwd():
        # Create a draft commits to clean.
        draft_file = hg_clone.join("new-file.txt")
        draft_file.write("text", mode="w+")
        repo.run_hg_cmds(
            [["add", draft_file.strpath], ["commit", "-m", "new draft commit"]]
        )
        assert repo.run_hg_cmds([["outgoing"]])

        # Dirty the working directory.
        draft_file.write("Extra data", mode="a")
        assert repo.run_hg_cmds([["status"]])

        # Can clean working directory without nuking commits
        repo.clean_repo(strip_non_public_commits=False)
        assert repo.run_hg_cmds([["outgoing"]])
        assert not repo.run_hg_cmds([["status"]])

        # Dirty the working directory again.
        draft_file.write("Extra data", mode="a")
        assert repo.run_hg_cmds([["status"]])

        # Cleaning should remove commit and clean working directory.
        repo.clean_repo()
        with pytest.raises(HgCommandError, match="no changes found"):
            repo.run_hg_cmds([["outgoing"]])
        assert not repo.run_hg_cmds([["status"]])

        # Create a commit and dirty the directory before exiting
        # the context manager as entering a new context should
        # provide a clean repo.
        draft_file.write("text", mode="w+")
        repo.run_hg_cmds(
            [["add", draft_file.strpath], ["commit", "-m", "new draft commit"]]
        )
        draft_file.write("extra data", mode="a")
        assert repo.run_hg_cmds([["outgoing"]])
        assert repo.run_hg_cmds([["status"]])

    with repo, hg_clone.as_cwd():
        # New context should be clean.
        with pytest.raises(HgCommandError, match="no changes found"):
            repo.run_hg_cmds([["outgoing"]])
        assert not repo.run_hg_cmds([["status"]])