def test_bad_trigger(self):
    """A trigger type that names no known runner must fail validation."""
    with open(os.path.join(self.examples, 'python-github.yml')) as f:
        data = yaml.safe_load(f)
    data['triggers'][0]['type'] = 'doesnotexist'
    with self.assertRaisesRegex(Exception, 'No such runner'):
        ProjectDefinition.validate_data(data)
def test_host_tag_rundef_loopon_bad(self):
    """A loop-on entry with a bogus parameter name must fail validation."""
    with open(os.path.join(self.examples, 'host-tag.yml')) as f:
        data = yaml.safe_load(f)
    data['triggers'][0]['runs'][1]['loop-on'][0]['param'] = 'host-tagz'
    exp = '"host-tag" or loop-on host-tag parameter required'
    with self.assertRaisesRegex(Exception, exp):
        ProjectDefinition.validate_data(data)
def test_examples(self):
    """Every example project definition must pass validation.

    Fix: the original used ``f`` both for the directory entry and the
    open file handle, shadowing the filename inside the loop; distinct
    names are used here.
    """
    for name in os.listdir(self.examples):
        if name[0] == ".":
            # a vim swap file :)
            continue
        with open(os.path.join(self.examples, name)) as f:
            data = yaml.safe_load(f)
        ProjectDefinition.validate_data(data)
def test_host_tag_rundef_loopon_bad(self):
    """Misspelling the loop-on host-tag parameter must be rejected."""
    path = os.path.join(self.examples, "host-tag.yml")
    with open(path) as f:
        data = yaml.safe_load(f)
    data["triggers"][0]["runs"][1]["loop-on"][0]["param"] = "host-tagz"
    exp = '"host-tag" or loop-on host-tag parameter required'
    with self.assertRaisesRegex(Exception, exp):
        ProjectDefinition.validate_data(data)
def test_bad_trigger(self):
    """Validation rejects a trigger whose type has no matching runner."""
    path = os.path.join(self.examples, "python-github.yml")
    with open(path) as f:
        data = yaml.safe_load(f)
    data["triggers"][0]["type"] = "doesnotexist"
    exp = "No such runner"
    with self.assertRaisesRegex(Exception, exp):
        ProjectDefinition.validate_data(data)
def _handle_triggers(storage, run):
    """React to a completed run: fire any follow-on triggers for it and,
    once the whole build is complete, run build-complete handling.

    On a trigger integrity error the run is failed and the error is
    appended to its console log.
    """
    if not run.complete or not run.trigger:
        return
    projdef = ProjectDefinition(
        yaml.safe_load(storage.get_project_definition(run.build)))
    rundef = json.loads(storage.get_run_definition(run))
    secrets = rundef.get('secrets')
    params = rundef.get('env', {})
    params['H_TRIGGER_URL'] = request.url
    run_trigger = projdef.get_trigger(run.trigger)
    try:
        for rt in run_trigger['runs']:
            if rt['name'] == run.name and run.status == BuildStatus.PASSED:
                _create_triggers(projdef, storage, run.build, params,
                                 secrets, rt.get('triggers', []),
                                 run_trigger['type'], run.queue_priority)
        db.session.refresh(run.build)
        run.build.refresh_status()
        if run.build.complete:
            _handle_build_complete(projdef, storage, run.build, params,
                                   secrets, run_trigger)
    except ValueError as e:
        current_app.logger.exception(
            'Caught integrity error and failed run: %d', run.id)
        run.set_status(BuildStatus.FAILED)
        content = storage.get_artifact_content(run, 'console.log')
        with storage.console_logfd(run, 'w') as f:
            f.write(content)
            f.write('\n\n== ERROR TRIGGERING RUN: %s\n' % e)
        storage.copy_log(run)
def test_run_name_too_long(self):
    """Run names of 80+ characters must be rejected."""
    with open(os.path.join(self.examples, "python-github.yml")) as f:
        data = yaml.safe_load(f)
    # 80 chars is one past the allowed maximum
    data["triggers"][0]["runs"][0]["name"] = "1" * 80
    exp = "Name of run must be less than 80 characters"
    with self.assertRaisesRegex(Exception, exp):
        ProjectDefinition.validate_data(data)
def test_simple_bad(self):
    """Removing the required top-level "timeout" key must fail validation."""
    with open(os.path.join(self.examples, "python-github.yml")) as f:
        data = yaml.safe_load(f)
    del data["timeout"]
    exp = "Cannot find required key 'timeout'"
    with self.assertRaisesRegex(Exception, exp):
        ProjectDefinition.validate_data(data)
def test_run_name_too_long(self):
    """An 80-character run name exceeds the limit and must be rejected."""
    path = os.path.join(self.examples, 'python-github.yml')
    with open(path) as f:
        data = yaml.safe_load(f)
    data['triggers'][0]['runs'][0]['name'] = '1' * 80
    with self.assertRaisesRegex(
            Exception, 'Name of run must be less than 80 characters'):
        ProjectDefinition.validate_data(data)
def _get_projdef(entry: PollerEntry) -> Optional[ProjectDefinition]:
    """Fetch, validate, and cache the project definition for a poller entry.

    Builds a raw-file URL for GitLab, GitHub, or CGit-style repos, retries
    once with the ``git.http.extraheader`` secret on a 401, and honors HTTP
    caching via If-None-Match / 304.

    Returns the (possibly previously cached) ProjectDefinition, or None when
    the definition cannot be fetched or fails validation.
    """
    repo = entry.trigger.definition_repo or ""
    defile = entry.trigger.definition_file
    if not defile:
        defile = entry.trigger.project + ".yml"

    gitlab = entry.trigger.secrets.get("gitlabtok")
    gheader = entry.trigger.secrets.get("git.http.extraheader")
    headers = entry.projdef_headers
    token = entry.trigger.secrets.get("githubtok")
    if gitlab:
        headers["PRIVATE-TOKEN"] = gitlab
        p = urlparse(repo)
        proj_enc = quote_plus(p.path[1:].replace(".git", ""))
        url = (p.scheme + "://" + p.netloc + "/api/v4/projects/" + proj_enc +
               "/repository/files/" + quote_plus(defile) + "/raw?ref=master")
    elif "github" in repo:
        if token:
            headers["Authorization"] = "token " + token
        url = repo.replace("github.com", "raw.githubusercontent.com")
        if url[-1] != "/":
            url += "/"
        url += "master/" + defile
    else:
        url = repo
        if not url.endswith(".git"):
            url += ".git"
        # BUG FIX: the original rebuilt the URL from `repo` here, which
        # silently discarded the ".git" suffix appended just above.
        url = url + "/plain/" + defile
        log.info("Assuming CGit style URL to file: %s", url)

    r = requests.get(url, headers=headers)
    if r.status_code == 401 and gheader:
        log.info("Authorization required using git header in secrets")
        key, val = gheader.split(":", 1)
        headers[key.strip()] = val.strip()
        r = requests.get(url, headers=headers)
    if r.status_code == 200:
        try:
            log.info("New version of project definition found for %s", url)
            data = yaml.safe_load(r.text)
            ProjectDefinition.validate_data(data)
            entry.definition = ProjectDefinition(data)
            # allows us to cache the resp. BUG FIX: a server that omits the
            # ETag header used to raise KeyError here, discarding a freshly
            # validated definition.
            etag = r.headers.get("ETAG")
            if etag:
                headers["If-None-Match"] = etag
        except Exception:
            log.exception("Validation failed for %s ...skipping", url)
            return None
    elif r.status_code == 304:
        # it hasn't changed
        log.debug("Cache hit for %s", url)
    else:
        log.error("Unable to read definition(%s): %d: %s", url,
                  r.status_code, r.text)
        return None
    return entry.definition
def _get_projdef(entry: PollerEntry) -> Optional[ProjectDefinition]:
    """Fetch, validate, and cache the project definition for a poller entry.

    Supports GitLab (API v4 raw file), GitHub (raw.githubusercontent.com),
    and CGit-style "/plain/" URLs; retries once with the
    ``git.http.extraheader`` secret on a 401 and uses If-None-Match caching.

    Returns the ProjectDefinition (possibly the cached one after a 304), or
    None when the definition cannot be read or fails validation.
    """
    repo = entry.trigger.definition_repo or ''
    defile = entry.trigger.definition_file
    if not defile:
        defile = entry.trigger.project + '.yml'

    gitlab = entry.trigger.secrets.get('gitlabtok')
    gheader = entry.trigger.secrets.get('git.http.extraheader')
    headers = entry.projdef_headers
    token = entry.trigger.secrets.get('githubtok')
    if gitlab:
        headers['PRIVATE-TOKEN'] = gitlab
        p = urlparse(repo)
        proj_enc = quote_plus(p.path[1:].replace('.git', ''))
        url = p.scheme + '://' + p.netloc + '/api/v4/projects/' + proj_enc + \
            '/repository/files/' + quote_plus(defile) + '/raw?ref=master'
    elif 'github' in repo:
        if token:
            headers['Authorization'] = 'token ' + token
        url = repo.replace('github.com', 'raw.githubusercontent.com')
        if url[-1] != '/':
            url += '/'
        url += 'master/' + defile
    else:
        url = repo
        if not url.endswith('.git'):
            url += '.git'
        # BUG FIX: this used to be `repo + '/plain/' + defile`, which threw
        # away the '.git' suffix appended just above (a dead store).
        url = url + '/plain/' + defile
        log.info('Assuming CGit style URL to file: %s', url)

    r = requests.get(url, headers=headers)
    if r.status_code == 401 and gheader:
        log.info('Authorization required using git header in secrets')
        key, val = gheader.split(':', 1)
        headers[key.strip()] = val.strip()
        r = requests.get(url, headers=headers)
    if r.status_code == 200:
        try:
            log.info('New version of project definition found for %s', url)
            data = yaml.safe_load(r.text)
            ProjectDefinition.validate_data(data)
            entry.definition = ProjectDefinition(data)
            # allows us to cache the resp. BUG FIX: a missing ETag header
            # used to raise KeyError and discard the validated definition.
            etag = r.headers.get('ETAG')
            if etag:
                headers['If-None-Match'] = etag
        except Exception:
            log.exception('Validation failed for %s ...skipping', url)
            return None
    elif r.status_code == 304:
        # it hasn't changed
        log.debug('Cache hit for %s', url)
    else:
        log.error('Unable to read definition(%s): %d: %s', url,
                  r.status_code, r.text)
        return None
    return entry.definition
def test_bad_script_mutual_exclusion(self):
    """A run may define "script" or "script-repo", never both."""
    with open(os.path.join(self.examples, 'python-github.yml')) as f:
        data = yaml.safe_load(f)
    data['triggers'][0]['runs'][0]['script-repo'] = {
        'name': 'doesnotexsit',
        'path': 'path',
    }
    exp = '"script" and "script-repo" are mutually exclusive'
    with self.assertRaisesRegex(Exception, exp):
        ProjectDefinition.validate_data(data)
def simulator_validate():
    """API endpoint: validate a JSON run-definition posted by the simulator.

    Responds 400 when the body is not JSON or fails validation; otherwise
    returns an empty jsend payload.
    """
    data = request.get_json()
    if not data:
        raise ApiError(400, "run-definition must be posted as json data")
    try:
        ProjectDefinition.validate_data(data)
    except Exception as e:
        raise ApiError(400, str(e))
    return jsendify({})
def test_bad_script_mutual_exclusion(self):
    """Adding script-repo next to an existing script must be rejected."""
    path = os.path.join(self.examples, "python-github.yml")
    with open(path) as f:
        data = yaml.safe_load(f)
    run = data["triggers"][0]["runs"][0]
    run["script-repo"] = {"name": "doesnotexsit", "path": "path"}
    exp = '"script" and "script-repo" are mutually exclusive'
    with self.assertRaisesRegex(Exception, exp):
        ProjectDefinition.validate_data(data)
def test_bad_script_repo(self):
    """Referencing an undeclared script repo must fail validation."""
    path = os.path.join(self.examples, "python-github.yml")
    with open(path) as f:
        data = yaml.safe_load(f)
    run = data["triggers"][0]["runs"][0]
    del run["script"]
    run["script-repo"] = {"name": "doesnotexsit", "path": "path"}
    exp = "Script repo does not exist"
    with self.assertRaisesRegex(Exception, exp):
        ProjectDefinition.validate_data(data)
def test_bad_script_repo(self):
    """A script-repo name with no matching entry in script-repos fails."""
    with open(os.path.join(self.examples, 'python-github.yml')) as f:
        data = yaml.safe_load(f)
    del data['triggers'][0]['runs'][0]['script']
    data['triggers'][0]['runs'][0]['script-repo'] = {
        'name': 'doesnotexsit',
        'path': 'path',
    }
    with self.assertRaisesRegex(Exception, 'Script repo does not exist'):
        ProjectDefinition.validate_data(data)
def test_recursive_build_trigger(self):
    """A trigger that re-triggers itself must hit the recursion guard."""
    with open(os.path.join(self.examples, 'python-github.yml')) as f:
        data = yaml.safe_load(f)
    # cause an infinite loop for triggers
    data['triggers'][0]['triggers'] = [{'name': 'unit-test'}]
    exp = 'Trigger recursion depth exceeded'
    with self.assertRaisesRegex(Exception, exp):
        ProjectDefinition.validate_data(data)
def test_recursive_build_trigger(self):
    """Self-referencing triggers must be caught by the recursion limit."""
    path = os.path.join(self.examples, "python-github.yml")
    with open(path) as f:
        data = yaml.safe_load(f)
    # cause an infinite loop for triggers
    data["triggers"][0]["triggers"] = [{"name": "unit-test"}]
    exp = "Trigger recursion depth exceeded"
    with self.assertRaisesRegex(Exception, exp):
        ProjectDefinition.validate_data(data)
def test_host_tag_rundef(self):
    """The trigger's host-tag must flow into the run definition and run."""
    with open(os.path.join(self.examples, "host-tag.yml")) as f:
        data = yaml.safe_load(f)
    ProjectDefinition.validate_data(data)
    proj = ProjectDefinition(data)

    dbrun = Mock()
    dbrun.build.project.name = "jobserv"
    dbrun.name = "flake8"
    dbrun.build.build_id = 1
    dbrun.api_key = "123"

    trigger = proj._data["triggers"][0]
    run = trigger["runs"][0]
    rundef = proj.get_run_definition(dbrun, run, trigger, {}, {})
    data = json.loads(rundef)
    self.assertEqual("aarch6*", data["host-tag"])
    self.assertEqual("aarch6*", dbrun.host_tag)
def trigger_build(project, reason, trigger_name, params, secrets, proj_def,
                  queue_priority=0):
    """Create a Build for `project` and queue the runs of the named trigger.

    Any failure while preparing the build is converted via _fail_unexpected
    so the build record reflects the error.
    """
    proj_def = ProjectDefinition.validate_data(proj_def)
    b = Build.create(project)
    try:
        if reason:
            b.reason = reason
        if trigger_name:
            b.trigger_name = trigger_name
        storage = Storage()
        storage.create_project_definition(
            b, yaml.dump(proj_def._data, default_flow_style=False))
        trigger = proj_def.get_trigger(trigger_name)
        if not trigger:
            raise KeyError("Project(%s) does not have a trigger: %s" %
                           (project, trigger_name))
        if trigger.get("triggers"):
            # there's a trigger to run after all the runs for this trigger
            # completed. it will need to know the parameters for this job
            storage.create_build_params(b, params)
    except Exception as e:
        raise _fail_unexpected(b, e)
    trigger_runs(storage, proj_def, b, trigger, params, secrets, None,
                 queue_priority)
    db.session.commit()
    return b
def test_host_tag_rundef(self):
    """host-tag from the project definition lands on the run definition."""
    path = os.path.join(self.examples, 'host-tag.yml')
    with open(path) as f:
        data = yaml.safe_load(f)
    ProjectDefinition.validate_data(data)
    proj = ProjectDefinition(data)

    dbrun = Mock()
    dbrun.build.project.name = 'jobserv'
    dbrun.name = 'flake8'
    dbrun.build.build_id = 1
    dbrun.api_key = '123'

    trigger = proj._data['triggers'][0]
    rundef = proj.get_run_definition(
        dbrun, trigger['runs'][0], trigger, {}, {})
    data = json.loads(rundef)
    self.assertEqual('aarch6*', data['host-tag'])
    self.assertEqual('aarch6*', dbrun.host_tag)
def test_script_repo_rundef(self):
    """A run using script-repo gets the repo's clone-url and path merged
    into its run definition."""
    with open(os.path.join(self.examples, "python-github.yml")) as f:
        data = yaml.safe_load(f)
    del data["triggers"][0]["runs"][0]["script"]
    data["triggers"][0]["runs"][0]["script-repo"] = {
        "name": "foo",
        "path": "path/foo.sh",
    }
    data["script-repos"] = {"foo": {"clone-url": "url"}}
    ProjectDefinition.validate_data(data)
    proj = ProjectDefinition(data)

    dbrun = Mock()
    dbrun.build.project.name = "jobserv"
    dbrun.name = "flake8"
    dbrun.build.build_id = 1
    dbrun.api_key = "123"

    trigger = proj._data["triggers"][0]
    run = trigger["runs"][0]
    rundef = proj.get_run_definition(dbrun, run, trigger, {}, {})
    repo = json.loads(rundef).get("script-repo")
    self.assertEqual({"clone-url": "url", "path": "path/foo.sh"}, repo)
def test_script_repo_rundef(self):
    """script-repo runs resolve their repo into the run definition."""
    path = os.path.join(self.examples, 'python-github.yml')
    with open(path) as f:
        data = yaml.safe_load(f)
    run0 = data['triggers'][0]['runs'][0]
    del run0['script']
    run0['script-repo'] = {'name': 'foo', 'path': 'path/foo.sh'}
    data['script-repos'] = {'foo': {'clone-url': 'url'}}
    ProjectDefinition.validate_data(data)
    proj = ProjectDefinition(data)

    dbrun = Mock()
    dbrun.build.project.name = 'jobserv'
    dbrun.name = 'flake8'
    dbrun.build.build_id = 1
    dbrun.api_key = '123'

    trigger = proj._data['triggers'][0]
    run = trigger['runs'][0]
    rundef = proj.get_run_definition(dbrun, run, trigger, {}, {})
    repo = json.loads(rundef).get('script-repo')
    self.assertEqual({'clone-url': 'url', 'path': 'path/foo.sh'}, repo)
def _handle_triggers(storage, run):
    """Handle completion of a run: create its follow-on triggers when it
    passed, then refresh the build and run build-complete handling if the
    whole build has finished.

    ValueError from triggering fails the run and appends the error to its
    console log.
    """
    if not run.complete or not run.trigger:
        return
    projdef = ProjectDefinition(
        yaml.safe_load(storage.get_project_definition(run.build)))
    rundef = json.loads(storage.get_run_definition(run))
    secrets = rundef.get("secrets")
    params = rundef.get("env", {})
    params["H_TRIGGER_URL"] = request.url
    run_trigger = projdef.get_trigger(run.trigger)
    try:
        for rt in run_trigger["runs"]:
            if rt["name"] == run.name and run.status == BuildStatus.PASSED:
                _create_triggers(
                    projdef,
                    storage,
                    run.build,
                    params,
                    secrets,
                    rt.get("triggers", []),
                    run_trigger,
                    run.queue_priority,
                )
        db.session.refresh(run.build)
        run.build.refresh_status()
        if run.build.complete:
            _handle_build_complete(projdef, storage, run.build, params,
                                   secrets, run_trigger)
    except ValueError as e:
        current_app.logger.exception(
            "Caught integrity error and failed run: %d", run.id)
        run.set_status(BuildStatus.FAILED)
        content = storage.get_artifact_content(run, "console.log")
        with storage.console_logfd(run, "w") as f:
            f.write(content)
            f.write("\n\n== ERROR TRIGGERING RUN: %s\n" % e)
        storage.copy_log(run)
def test_bad_container_auth(self):
    """A private container whose auth secret is missing raises ApiError."""
    with open(os.path.join(self.examples, "private-container.yml")) as f:
        data = yaml.safe_load(f)
    proj = ProjectDefinition.validate_data(data)

    dbrun = Mock()
    dbrun.build.project.name = "jobserv"
    dbrun.build.build_id = 1
    dbrun.name = "flake8"
    dbrun.api_key = "secret"

    trigger = proj._data["triggers"][0]
    run = trigger["runs"][0]
    exp = "not defined in the run's secrets"
    with self.assertRaisesRegex(ApiError, exp):
        proj.get_run_definition(dbrun, run, trigger, {}, {})
def test_bad_container_auth(self):
    """Missing container-auth secret surfaces as an ApiError."""
    path = os.path.join(self.examples, 'private-container.yml')
    with open(path) as f:
        data = yaml.safe_load(f)
    proj = ProjectDefinition.validate_data(data)

    dbrun = Mock()
    dbrun.build.project.name = 'jobserv'
    dbrun.build.build_id = 1
    dbrun.name = 'flake8'
    dbrun.api_key = 'secret'

    trigger = proj._data['triggers'][0]
    with self.assertRaisesRegex(ApiError, "not defined in the run's secrets"):
        proj.get_run_definition(dbrun, trigger['runs'][0], trigger, {}, {})
def test_params(self):
    """Make sure we get project, trigger, and run params"""
    with open(os.path.join(self.examples, 'parameters.yml')) as f:
        data = yaml.safe_load(f)
    proj = ProjectDefinition.validate_data(data)

    dbrun = Mock()
    dbrun.build.project.name = 'jobserv'
    dbrun.name = 'basic'
    dbrun.build.build_id = 1
    dbrun.api_key = '123'

    trigger = proj._data['triggers'][0]
    run = trigger['runs'][0]
    rundef = proj.get_run_definition(dbrun, run, trigger, {}, {})
    env = json.loads(rundef)['env']
    self.assertEqual('GLOBAL', env['GLOBAL_PARAM'])
    self.assertEqual('RUN', env['RUN_PARAM'])
    self.assertEqual('TRIGGER', env['TRIGGER_PARAM'])
def test_params(self):
    """Make sure we get project, trigger, and run params"""
    path = os.path.join(self.examples, "parameters.yml")
    with open(path) as f:
        data = yaml.safe_load(f)
    proj = ProjectDefinition.validate_data(data)

    dbrun = Mock()
    dbrun.build.project.name = "jobserv"
    dbrun.name = "basic"
    dbrun.build.build_id = 1
    dbrun.api_key = "123"

    trigger = proj._data["triggers"][0]
    run = trigger["runs"][0]
    rundef = proj.get_run_definition(dbrun, run, trigger, {}, {})
    data = json.loads(rundef)
    self.assertEqual("GLOBAL", data["env"]["GLOBAL_PARAM"])
    self.assertEqual("RUN", data["env"]["RUN_PARAM"])
    self.assertEqual("TRIGGER", data["env"]["TRIGGER_PARAM"])
def test_script_repo_token(self, url_for):
    """A script-repo declaring a token makes run-definition creation fail
    with ApiError (the secret is not available)."""
    url_for.return_value = 'blah'
    with open(os.path.join(self.examples, 'python-github.yml')) as f:
        data = yaml.safe_load(f)
    del data['triggers'][0]['runs'][0]['script']
    data['triggers'][0]['runs'][0]['script-repo'] = {
        'name': 'foo',
        'path': 'path/foo.sh',
    }
    data['script-repos'] = {'foo': {'clone-url': 'url', 'token': 'f'}}
    ProjectDefinition.validate_data(data)
    proj = ProjectDefinition(data)

    dbrun = Mock()
    dbrun.build.project.name = 'jobserv'
    dbrun.build.build_id = 1
    dbrun.name = 'flake8'
    dbrun.api_key = 'secret'

    trigger = proj._data['triggers'][0]
    with self.assertRaises(ApiError):
        proj.get_run_definition(dbrun, trigger['runs'][0], trigger, {}, {})
def test_script_repo_token(self, url_for):
    """script-repo entries with a token must raise ApiError when the run
    definition is produced without the matching secret."""
    url_for.return_value = "blah"
    path = os.path.join(self.examples, "python-github.yml")
    with open(path) as f:
        data = yaml.safe_load(f)
    run0 = data["triggers"][0]["runs"][0]
    del run0["script"]
    run0["script-repo"] = {"name": "foo", "path": "path/foo.sh"}
    data["script-repos"] = {"foo": {"clone-url": "url", "token": "f"}}
    ProjectDefinition.validate_data(data)
    proj = ProjectDefinition(data)

    dbrun = Mock()
    dbrun.build.project.name = "jobserv"
    dbrun.build.build_id = 1
    dbrun.name = "flake8"
    dbrun.api_key = "secret"

    trigger = proj._data["triggers"][0]
    run = trigger["runs"][0]
    with self.assertRaises(ApiError):
        proj.get_run_definition(dbrun, run, trigger, {}, {})