def test_build_list_paginate(self):
    """Paginated build listing exposes a "next" link and ends cleanly."""
    for _ in range(8):
        Build.create(self.project)

    # First page of 4 should advertise a follow-up page.
    page = self.get_json(self.urlbase + '?limit=4')
    self.assertIn('next', page)

    # Following the link reaches the final page.
    page = self.get_json(page['next'])
    self.assertNotIn('next', page)

    # A page past the end is simply empty.
    page = self.get_json(self.urlbase + '?limit=4&page=2')
    self.assertEqual([], page['builds'])
def test_create_build(self):
    """Build.create assigns monotonically increasing build ids."""
    first = Build.create(self.proj)
    self.assertEqual(1, first.build_id)
    second = Build.create(self.proj)
    self.assertEqual(2, second.build_id)

    # Manually insert a build with a much higher id...
    db.session.add(Build(self.proj, 99))
    db.session.commit()

    # ...and the next created build continues from there.
    latest = Build.create(self.proj)
    self.assertEqual(100, latest.build_id)
def test_build_list_paginate(self):
    """Listing builds with a limit paginates via a "next" URL."""
    for _ in range(8):
        Build.create(self.project)

    resp = self.get_json(self.urlbase + "?limit=4")
    self.assertIn("next", resp)  # more results remain

    resp = self.get_json(resp["next"])
    self.assertNotIn("next", resp)  # last page reached

    # Requesting beyond the final page yields no builds.
    resp = self.get_json(self.urlbase + "?limit=4&page=2")
    self.assertEqual([], resp["builds"])
def test_as_json(self):
    """Run.as_json renders web_url from the RUN_URL_FMT placeholders."""
    import jobserv.models

    orig = jobserv.models.RUN_URL_FMT
    r = Run(self.build, 'name1')
    db.session.add(r)
    db.session.commit()
    try:
        jobserv.models.RUN_URL_FMT = 'bar {build} | {run} | {project}'
        Build.create(self.proj)
        data = r.as_json()
        self.assertEqual('bar 1 | name1 | job-1', data['web_url'])
    finally:
        # BUG FIX: the original did `jobserv.models.BUILD_URL_FMT = orig`
        # here, clobbering BUILD_URL_FMT with the saved RUN_URL_FMT value
        # and leaking the patched RUN_URL_FMT into subsequent tests.
        jobserv.models.RUN_URL_FMT = orig
def test_unique_build_id(self):
    """build_id is unique per project, not globally."""
    self.create_projects('job-2')
    proj2 = Project.query.filter_by(name='job-2').first_or_404()

    # both jobs should be able to have build_id=1
    db.session.add(Build(self.proj, 1))
    db.session.add(Build(proj2, 1))
    db.session.commit()

    # now make sure build_id=1 can't be repeated
    with self.assertRaises(IntegrityError):
        db.session.add(Build(self.proj, 1))
        db.session.commit()
def test_build_get_latest(self):
    """latest/ returns the newest PASSED build, optionally by trigger."""
    tagged = Build.create(self.project)
    tagged.trigger_name = 'test-trigger-qs'
    tagged.status = BuildStatus.PASSED
    db.session.commit()

    passed = Build.create(self.project)
    passed.status = BuildStatus.PASSED
    Build.create(self.project)  # newest build, but not PASSED

    data = self.get_json(self.urlbase + 'latest/')['build']
    self.assertEqual(passed.build_id, data['build_id'])

    # Narrowing by trigger_name selects the older tagged build instead.
    url = self.urlbase + 'latest/?trigger_name=test-trigger-qs'
    data = self.get_json(url)['build']
    self.assertEqual(tagged.build_id, data['build_id'])
def test_worker_sync_builds_uploading(self, storage):
    """Make sure scheduler takes into account runs that are UPLOADING.

    1. Create a "synchronous" Project
    2. Add an UPLOADING build and a QUEUED build
    Make sure the QUEUED build is not assigned
    """
    # The scheduler path under test requires MySQL (skipped on SQLite).
    if db.engine.dialect.name == "sqlite":
        self.skipTest("Test requires MySQL")
    rundef = {"run_url": "foo", "runner_url": "foo", "env": {}}
    storage().get_run_definition.return_value = json.dumps(rundef)
    # An enlisted, online worker able to service "aarch96"-tagged runs.
    w = Worker("w1", "ubuntu", 12, 2, "aarch64", "key", 2, ["aarch96"])
    w.enlisted = True
    w.online = True
    db.session.add(w)
    self.create_projects("job-1")
    (p1, ) = Project.query.all()
    p1.synchronous_builds = True
    db.session.commit()
    # add active build
    b = Build.create(p1)
    r = Run(b, "p1b1r1")
    r.status = BuildStatus.UPLOADING
    r.host_tag = "aarch96"
    db.session.add(r)
    # A second, still-queued build for the same synchronous project.
    b = Build.create(p1)
    r = Run(b, "p1b2r1")
    r.host_tag = "aarch96"
    db.session.add(r)
    db.session.commit()
    headers = [
        ("Content-type", "application/json"),
        ("Authorization", "Token key"),
    ]
    qs = "available_runners=1&foo=2"
    # There should be no work available
    resp = self.client.get("/workers/w1/", headers=headers, query_string=qs)
    self.assertEqual(200, resp.status_code, resp.data)
    data = json.loads(resp.data.decode())
    self.assertNotIn("run-defs", data["data"]["worker"], data["data"]["worker"])
def test_stuck(self, update_run, notify):
    """Ensure stuck runs are failed.

    A run whose last RUNNING/CANCELLING event is more than 12 hours old
    should be picked up by _check_stuck and reported via notify/update_run.
    """
    self.create_projects('proj1')
    b = Build.create(Project.query.all()[0])
    r = Run(b, 'bla')
    r.status = BuildStatus.RUNNING
    db.session.add(r)
    db.session.commit()
    # Backdate the RUNNING event so the run looks stuck.
    e = RunEvents(r, BuildStatus.RUNNING)
    e.time = datetime.datetime.utcnow() - datetime.timedelta(hours=13)
    db.session.add(e)
    db.session.commit()
    _check_stuck()
    self.assertEqual('bla', notify.call_args[0][0].name)

    # BUG FIX: was `notify.rest_mock()` — a typo that silently created a
    # child mock attribute instead of clearing recorded calls.
    notify.reset_mock()
    r.status = BuildStatus.CANCELLING
    e = RunEvents(r, BuildStatus.RUNNING)
    e.time = datetime.datetime.utcnow() - datetime.timedelta(hours=13)
    db.session.add(e)
    db.session.commit()
    _check_stuck()
    self.assertEqual('bla', notify.call_args[0][0].name)
    self.assertEqual('bla', update_run.call_args[0][0].name)
def trigger_build(project, reason, trigger_name, params, secrets, proj_def,
                  queue_priority=0):
    """Create a Build for `project` and launch the runs of `trigger_name`.

    :param project: the Project the build belongs to
    :param reason: optional human-readable reason stored on the build
    :param trigger_name: name of the trigger in the project definition
    :param params: parameters passed through to the triggered runs
    :param secrets: secrets passed through to the triggered runs
    :param proj_def: raw project-definition data (validated here)
    :param queue_priority: priority forwarded to trigger_runs
    :returns: the committed Build
    """
    proj_def = ProjectDefinition.validate_data(proj_def)
    b = Build.create(project)
    try:
        if reason:
            b.reason = reason
        if trigger_name:
            b.trigger_name = trigger_name
        storage = Storage()
        # Persist the exact project definition this build was created from.
        storage.create_project_definition(
            b, yaml.dump(proj_def._data, default_flow_style=False))
        trigger = proj_def.get_trigger(trigger_name)
        if not trigger:
            raise KeyError("Project(%s) does not have a trigger: %s" %
                           (project, trigger_name))
        if trigger.get("triggers"):
            # there's a trigger to run after all the runs for this trigger
            # completed. it will need to know the parameters for this job
            storage.create_build_params(b, params)
    except Exception as e:
        # Let _fail_unexpected record the error on the build; whatever it
        # returns is raised to the caller.
        raise _fail_unexpected(b, e)
    # Note: failures from trigger_runs are intentionally NOT routed through
    # _fail_unexpected above — only storage/definition preparation is.
    trigger_runs(storage, proj_def, b, trigger, params, secrets, None,
                 queue_priority)
    db.session.commit()
    return b
def test_simple(self):
    """A directly-inserted Build starts life as QUEUED."""
    db.session.add(Build(self.proj, 1))
    db.session.commit()

    builds = Build.query.all()
    self.assertEqual(1, len(builds))
    created = builds[0]
    self.assertEqual(1, created.build_id)
    self.assertEqual('QUEUED', created.status.name)
def test_promote_post(self):
    """Promotion is rejected while in progress and accepted once passed."""
    b = Build.create(self.project)
    for run_name in ('run0', 'run1'):
        db.session.add(Run(b, run_name))
    url = 'http://localhost/projects/proj-1/builds/%d/promote' % b.build_id
    headers = {
        'Content-type': 'application/json',
    }
    payload = {
        'name': 'release-x',
        'annotation': 'foo bar',
    }

    # you can't promote an in-progress build
    _sign(url, headers, 'POST')
    self._post(url, json.dumps(payload), headers, 400)

    # Once every run has passed, promotion succeeds.
    for r in b.runs:
        r.set_status(BuildStatus.PASSED)
    self._post(url, json.dumps(payload), headers, 201)

    db.session.refresh(b)
    self.assertEqual(BuildStatus.PROMOTED, b.status)
    self.assertEqual(payload['name'], b.name)
    self.assertEqual(payload['annotation'], b.annotation)
def test_promote_post(self):
    """POSTing a promotion 400s while runs are active, 201s once passed."""
    build = Build.create(self.project)
    db.session.add(Run(build, "run0"))
    db.session.add(Run(build, "run1"))
    url = "http://localhost/projects/proj-1/builds/%d/promote" % build.build_id
    headers = {"Content-type": "application/json"}
    body = json.dumps({"name": "release-x", "annotation": "foo bar"})

    # you can't promote an in-progress build
    _sign(url, headers, "POST")
    self._post(url, body, headers, 400)

    for run in build.runs:
        run.set_status(BuildStatus.PASSED)
    self._post(url, body, headers, 201)

    db.session.refresh(build)
    self.assertEqual(BuildStatus.PROMOTED, build.status)
    self.assertEqual("release-x", build.name)
    self.assertEqual("foo bar", build.annotation)
def test_worker_sync_builds_regression(self, storage):
    """Make sure scheduler takes into account other active projects
    for sync builds.
    """
    # The scheduler path under test requires MySQL (skipped on SQLite).
    if db.engine.dialect.name == "sqlite":
        self.skipTest("Test requires MySQL")
    rundef = {"run_url": "foo", "runner_url": "foo", "env": {}}
    storage().get_run_definition.return_value = json.dumps(rundef)
    # An enlisted, online worker able to service "aarch96"-tagged runs.
    w = Worker("w1", "ubuntu", 12, 2, "aarch64", "key", 2, ["aarch96"])
    w.enlisted = True
    w.online = True
    db.session.add(w)
    self.create_projects("job-1")
    self.create_projects("job-2")
    p1, p2 = Project.query.all()
    p1.synchronous_builds = True  # only job-1 is synchronous
    db.session.commit()
    # add active build
    b = Build.create(p2)
    r = Run(b, "p2b1r1")
    r.status = BuildStatus.RUNNING
    r.host_tag = "aarch96"
    db.session.add(r)
    # A queued run for the synchronous project p1.
    b = Build.create(p1)
    r = Run(b, "p1b1r1")
    r.host_tag = "aarch96"
    db.session.add(r)
    db.session.commit()
    headers = [
        ("Content-type", "application/json"),
        ("Authorization", "Token key"),
    ]
    qs = "available_runners=1&foo=2"
    # This should make the p1b1r2 run running
    resp = self.client.get("/workers/w1/", headers=headers, query_string=qs)
    self.assertEqual(200, resp.status_code, resp.data)
    data = json.loads(resp.data.decode())
    # p2's RUNNING run must not block p1's queued run from being assigned.
    self.assertEqual(1, len(data["data"]["worker"]["run-defs"]))
def setUp(self):
    """Create job-1 with a single build holding one FAILED run."""
    super().setUp()
    self.create_projects("job-1")
    self.proj = Project.query.filter_by(name="job-1").first_or_404()
    self.build = Build.create(self.proj)
    run = Run(self.build, "run-name")
    db.session.add(run)
    run.set_status(BuildStatus.FAILED)
    self.run = run
def setUp(self):
    """Provision job-1 with one build and a RUNNING run."""
    super().setUp()
    self.create_projects('job-1')
    self.proj = Project.query.filter_by(name='job-1').first_or_404()
    self.build = Build.create(self.proj)
    run = Run(self.build, 'name1')
    run.status = BuildStatus.RUNNING
    db.session.add(run)
    db.session.commit()
    self.run = run
def test_promote_list_empty(self, storage):
    """A passed-but-unpromoted build is absent from promoted-builds."""
    b = Build.create(self.project)
    for run_name in ('run0', 'run1'):
        db.session.add(Run(b, run_name))
    for r in b.runs:
        r.set_status(BuildStatus.PASSED)

    url = '/projects/%s/promoted-builds/' % self.project.name
    builds = self.get_json(url)['builds']
    self.assertEqual(0, len(builds))
def test_as_json(self):
    """Build.as_json renders web_url from the BUILD_URL_FMT placeholders."""
    import jobserv.models

    saved = jobserv.models.BUILD_URL_FMT
    try:
        jobserv.models.BUILD_URL_FMT = 'foo {build} | {project}'
        build = Build.create(self.proj)
        self.assertEqual('foo 1 | job-1', build.as_json()['web_url'])
    finally:
        # Always restore the module-level format for later tests.
        jobserv.models.BUILD_URL_FMT = saved
def test_build_get_latest(self):
    """latest/ honors the trigger_name and promoted query filters."""
    tagged = Build.create(self.project)
    tagged.trigger_name = "test-trigger-qs"
    tagged.status = BuildStatus.PASSED
    db.session.commit()

    passed = Build.create(self.project)
    passed.status = BuildStatus.PASSED
    Build.create(self.project)  # newest build, but never passes

    data = self.get_json(self.urlbase + "latest/")["build"]
    self.assertEqual(passed.build_id, data["build_id"])

    # Narrowing by trigger_name selects the older tagged build.
    url = self.urlbase + "latest/?trigger_name=test-trigger-qs"
    data = self.get_json(url)["build"]
    self.assertEqual(tagged.build_id, data["build_id"])

    # After promotion, the promoted filter finds it too.
    tagged.status = BuildStatus.PROMOTED
    db.session.commit()
    data = self.get_json(self.urlbase + "latest/?promoted=1")["build"]
    self.assertEqual(tagged.build_id, data["build_id"])
def setUp(self):
    """Create proj-1 with one build and point storage at a temp dir."""
    super().setUp()
    self.create_projects("proj-1")
    project = Project.query.all()[0]
    self.build = Build.create(project)
    self.urlbase = "/projects/proj-1/builds/1/runs/"

    tmpdir = tempfile.mkdtemp()
    jobserv.storage.base.JOBS_DIR = tmpdir
    jobserv.models.JOBS_DIR = tmpdir
    self.addCleanup(shutil.rmtree, tmpdir)
def test_cancelled(self, update):
    """Ensure runs that were cancelled before they were assigned to a
    worker are failed."""
    self.create_projects('proj1')
    build = Build.create(Project.query.all()[0])
    run = Run(build, 'bla')
    run.status = BuildStatus.CANCELLING
    db.session.add(run)
    db.session.commit()

    _check_cancelled()
    # The run was never picked up, so it should be marked FAILED.
    self.assertEqual('FAILED', update.call_args[1]['status'])
def test_promote_get(self, storage):
    """A promoted build can be fetched by name with its annotation."""
    b = Build.create(self.project)
    for run_name in ('run0', 'run1'):
        db.session.add(Run(b, run_name))
    for r in b.runs:
        r.set_status(BuildStatus.PASSED)
        db.session.add(Test(r, 't1', None, BuildStatus.PASSED))
    b.status = BuildStatus.PROMOTED
    b.name = 'release-X'
    b.annotation = 'foo bar'

    url = '/projects/%s/promoted-builds/release-X/' % self.project.name
    build = self.get_json(url)['build']
    self.assertEqual('foo bar', build['annotation'])
def test_promote_get(self, storage):
    """GET promoted-builds/<name>/ returns the build's annotation."""
    build = Build.create(self.project)
    db.session.add(Run(build, "run0"))
    db.session.add(Run(build, "run1"))
    for run in build.runs:
        run.set_status(BuildStatus.PASSED)
        test = Test(run, "t1", None, BuildStatus.PASSED)
        db.session.add(test)
    build.status = BuildStatus.PROMOTED
    build.name = "release-X"
    build.annotation = "foo bar"

    url = "/projects/%s/promoted-builds/release-X/" % self.project.name
    data = self.get_json(url)["build"]
    self.assertEqual("foo bar", data["annotation"])
def test_build_list(self):
    """Builds are listed newest first."""
    for _ in range(3):
        Build.create(self.project)

    builds = self.get_json(self.urlbase)['builds']
    self.assertEqual(3, len(builds))
    # build_ids come back in descending order: 3, 2, 1.
    expected = [3 - i for i in range(len(builds))]
    self.assertEqual(expected, [b['build_id'] for b in builds])
def test_get_build_stats(self):
    """_get_build_stats aggregates totals, passes, and pass rate."""
    # we already have one build created from constructor
    b = None
    for i in range(9):
        b = Build.create(self.proj)
        run = Run(b, "run-name")
        db.session.add(run)
        # Alternate failures (odd i) and passes (even i).
        run.set_status(BuildStatus.FAILED if i % 2 else BuildStatus.PASSED)
    db.session.commit()

    stats = _get_build_stats(b)
    self.assertEqual(10, stats["total"])
    self.assertEqual(5, stats["passes"])
    self.assertEqual(50, stats["pass_rate"])
def setUp(self):
    """Create proj-1 with a run and a test, backed by a temp JOBS_DIR."""
    super().setUp()
    self.create_projects('proj-1')
    project = Project.query.all()[0]
    build = Build.create(project)
    run = Run(build, 'run0')
    db.session.add(run)
    db.session.flush()  # flush so the Run row exists before the Test rows

    self.test = Test(run, 'test1', 'test1-ctx')
    db.session.add(self.test)
    db.session.commit()

    self.urlbase = '/projects/proj-1/builds/1/runs/run0/tests/'
    tmpdir = tempfile.mkdtemp()
    jobserv.storage.base.JOBS_DIR = tmpdir
    jobserv.models.JOBS_DIR = tmpdir
    self.addCleanup(shutil.rmtree, tmpdir)
def test_surge_simple(self):
    """Surge file appears when the amd64 queue exceeds the ratio."""
    self.create_projects('proj1')
    build = Build.create(Project.query.all()[0])
    for idx in range(SURGE_SUPPORT_RATIO + 1):
        run = Run(build, 'run%d' % idx)
        run.host_tag = 'amd64'
        db.session.add(run)
    db.session.commit()

    _check_queue()
    surge_file = jobserv.worker.SURGE_FILE + '-amd64'
    self.assertTrue(os.path.exists(surge_file))

    # Dropping one queued run takes us back under the surge threshold.
    db.session.delete(Run.query.all()[0])
    db.session.commit()
    worker_module.DETECT_FLAPPING = False
    _check_queue()
    self.assertFalse(os.path.exists(surge_file))
def test_surge_complex(self):
    """Surge detection is tracked independently per host tag."""
    # we'll have two amd64 workers and one armhf
    worker = Worker("w2", "d", 1, 1, "amd64", "k", 1, "amd64")
    worker.enlisted = True
    worker.online = True
    db.session.add(worker)
    worker = Worker("w3", "d", 1, 1, "armhf", "k", 1, "armhf")
    worker.enlisted = True
    worker.online = True
    db.session.add(worker)
    db.session.commit()
    self.create_projects("proj1")
    b = Build.create(Project.query.all()[0])
    # Queue SURGE_SUPPORT_RATIO + 1 runs for each host tag.
    for x in range(SURGE_SUPPORT_RATIO + 1):
        r = Run(b, "amd%d" % x)
        r.host_tag = "amd64"
        db.session.add(r)
        r = Run(b, "armhf%d" % x)
        r.host_tag = "armhf"
        db.session.add(r)
    db.session.commit()
    _check_queue()
    # amd64 has enough workers to stay under surge; armhf does not.
    self.assertFalse(os.path.exists(jobserv.worker.SURGE_FILE + "-amd64"))
    self.assertTrue(os.path.exists(jobserv.worker.SURGE_FILE + "-armhf"))
    # get us under surge for armhf
    db.session.delete(Run.query.filter(Run.host_tag == "armhf").first())
    # and over surge for amd64
    for x in range(SURGE_SUPPORT_RATIO + 1):
        r = Run(b, "run%d" % x)
        r.host_tag = "amd64"
        db.session.add(r)
    db.session.commit()
    worker_module.DETECT_FLAPPING = False
    _check_queue()
    # Surge state should now be flipped for both tags.
    self.assertTrue(os.path.exists(jobserv.worker.SURGE_FILE + "-amd64"))
    self.assertFalse(os.path.exists(jobserv.worker.SURGE_FILE + "-armhf"))
    # make sure we know about deleted workers
    worker.deleted = True
    db.session.commit()
    _check_queue()
    self.assertTrue(os.path.exists(jobserv.worker.SURGE_FILE + "-armhf"))
def test_surge_complex(self):
    """Surge detection is tracked independently per host tag."""
    # we'll have two amd64 workers and one armhf
    worker = Worker('w2', 'd', 1, 1, 'amd64', 'k', 1, 'amd64')
    worker.enlisted = True
    worker.online = True
    db.session.add(worker)
    worker = Worker('w3', 'd', 1, 1, 'armhf', 'k', 1, 'armhf')
    worker.enlisted = True
    worker.online = True
    db.session.add(worker)
    db.session.commit()
    self.create_projects('proj1')
    b = Build.create(Project.query.all()[0])
    # Queue SURGE_SUPPORT_RATIO + 1 runs for each host tag.
    for x in range(SURGE_SUPPORT_RATIO + 1):
        r = Run(b, 'amd%d' % x)
        r.host_tag = 'amd64'
        db.session.add(r)
        r = Run(b, 'armhf%d' % x)
        r.host_tag = 'armhf'
        db.session.add(r)
    db.session.commit()
    _check_queue()
    # amd64 has enough workers to stay under surge; armhf does not.
    self.assertFalse(os.path.exists(jobserv.worker.SURGE_FILE + '-amd64'))
    self.assertTrue(os.path.exists(jobserv.worker.SURGE_FILE + '-armhf'))
    # get us under surge for armhf
    db.session.delete(Run.query.filter(Run.host_tag == 'armhf').first())
    # and over surge for amd64
    for x in range(SURGE_SUPPORT_RATIO + 1):
        r = Run(b, 'run%d' % x)
        r.host_tag = 'amd64'
        db.session.add(r)
    db.session.commit()
    worker_module.DETECT_FLAPPING = False
    _check_queue()
    # Surge state should now be flipped for both tags.
    self.assertTrue(os.path.exists(jobserv.worker.SURGE_FILE + '-amd64'))
    self.assertFalse(os.path.exists(jobserv.worker.SURGE_FILE + '-armhf'))
    # make sure we know about deleted workers
    worker.deleted = True
    db.session.commit()
    _check_queue()
    self.assertTrue(os.path.exists(jobserv.worker.SURGE_FILE + '-armhf'))
def test_promote_list(self, storage):
    """Promoted builds list includes name, annotation, and test names."""
    build = Build.create(self.project)
    db.session.add(Run(build, "run0"))
    db.session.add(Run(build, "run1"))
    for run in build.runs:
        run.set_status(BuildStatus.PASSED)
        db.session.add(Test(run, "t1", None, BuildStatus.PASSED))
    build.status = BuildStatus.PROMOTED
    build.name = "release-X"
    build.annotation = "foo bar"

    url = "/projects/%s/promoted-builds/" % self.project.name
    builds = self.get_json(url)["builds"]
    self.assertEqual(1, len(builds))
    promoted = builds[0]
    self.assertEqual("release-X", promoted["name"])
    self.assertEqual("foo bar", promoted["annotation"])
    # Test names are qualified by their run's name.
    self.assertEqual(["run0-t1", "run1-t1"],
                     [t["name"] for t in promoted["tests"]])
def test_promote_list(self, storage):
    """Listing promoted builds exposes per-run test names."""
    b = Build.create(self.project)
    for run_name in ('run0', 'run1'):
        db.session.add(Run(b, run_name))
    for r in b.runs:
        r.set_status(BuildStatus.PASSED)
        t = Test(r, 't1', None, BuildStatus.PASSED)
        db.session.add(t)
    b.status = BuildStatus.PROMOTED
    b.name = 'release-X'
    b.annotation = 'foo bar'

    listing = self.get_json(
        '/projects/%s/promoted-builds/' % self.project.name)
    builds = listing['builds']
    self.assertEqual(1, len(builds))
    self.assertEqual('release-X', builds[0]['name'])
    self.assertEqual('foo bar', builds[0]['annotation'])
    # Test names are qualified by their run's name.
    self.assertEqual(['run0-t1', 'run1-t1'],
                     [x['name'] for x in builds[0]['tests']])