Example #1
 def test_examples(self):
     for name in os.listdir(self.examples):
         if name[0] == ".":  # a vim swap file :)
             continue
         with open(os.path.join(self.examples, name)) as f:
             data = yaml.safe_load(f)
             ProjectDefinition.validate_data(data)
Example #2
 def test_host_tag_rundef_loopon_bad(self):
     with open(os.path.join(self.examples, 'host-tag.yml')) as f:
         data = yaml.safe_load(f)
         data['triggers'][0]['runs'][1]['loop-on'][0]['param'] = 'host-tagz'
         exp = '"host-tag" or loop-on host-tag parameter required'
         with self.assertRaisesRegex(Exception, exp):
             ProjectDefinition.validate_data(data)
Example #3
 def test_host_tag_rundef_loopon_bad(self):
     with open(os.path.join(self.examples, "host-tag.yml")) as f:
         data = yaml.safe_load(f)
         data["triggers"][0]["runs"][1]["loop-on"][0]["param"] = "host-tagz"
         exp = '"host-tag" or loop-on host-tag parameter required'
         with self.assertRaisesRegex(Exception, exp):
             ProjectDefinition.validate_data(data)
Example #4
 def test_bad_trigger(self):
     with open(os.path.join(self.examples, "python-github.yml")) as f:
         data = yaml.safe_load(f)
         data["triggers"][0]["type"] = "doesnotexist"
         exp = "No such runner"
         with self.assertRaisesRegex(Exception, exp):
             ProjectDefinition.validate_data(data)
Example #5
 def test_bad_trigger(self):
     with open(os.path.join(self.examples, 'python-github.yml')) as f:
         data = yaml.safe_load(f)
         data['triggers'][0]['type'] = 'doesnotexist'
         exp = 'No such runner'
         with self.assertRaisesRegex(Exception, exp):
             ProjectDefinition.validate_data(data)
Example #6
 def test_run_name_too_long(self):
     with open(os.path.join(self.examples, "python-github.yml")) as f:
         data = yaml.safe_load(f)
         # make the run name exceed the 80 character limit
         data["triggers"][0]["runs"][0]["name"] = "1" * 80
         exp = "Name of run must be less than 80 characters"
         with self.assertRaisesRegex(Exception, exp):
             ProjectDefinition.validate_data(data)
Example #7
 def test_simple_bad(self):
     # just make a schema with no "timeout" and ensure it fails
     with open(os.path.join(self.examples, "python-github.yml")) as f:
         data = yaml.safe_load(f)
         del data["timeout"]
         exp = "Cannot find required key 'timeout'"
         with self.assertRaisesRegex(Exception, exp):
             ProjectDefinition.validate_data(data)
Example #8
 def test_run_name_too_long(self):
     with open(os.path.join(self.examples, 'python-github.yml')) as f:
         data = yaml.safe_load(f)
         # make the run name exceed the 80 character limit
         data['triggers'][0]['runs'][0]['name'] = '1' * 80
         exp = 'Name of run must be less than 80 characters'
         with self.assertRaisesRegex(Exception, exp):
             ProjectDefinition.validate_data(data)
Example #9
def _get_projdef(entry: PollerEntry) -> Optional[ProjectDefinition]:
    repo = entry.trigger.definition_repo or ''
    defile = entry.trigger.definition_file
    if not defile:
        defile = entry.trigger.project + '.yml'
    gitlab = entry.trigger.secrets.get('gitlabtok')
    gheader = entry.trigger.secrets.get('git.http.extraheader')

    headers = entry.projdef_headers
    token = entry.trigger.secrets.get('githubtok')

    if gitlab:
        headers['PRIVATE-TOKEN'] = gitlab
        p = urlparse(repo)
        proj_enc = quote_plus(p.path[1:].replace('.git', ''))
        url = p.scheme + '://' + p.netloc + '/api/v4/projects/' + proj_enc + \
            '/repository/files/' + quote_plus(defile) + '/raw?ref=master'
    elif 'github' in repo:
        if token:
            headers['Authorization'] = 'token ' + token
        url = repo.replace('github.com', 'raw.githubusercontent.com')
        if url[-1] != '/':
            url += '/'
        url += 'master/' + defile
    else:
        url = repo
        if not url.endswith('.git'):
            url += '.git'
        url += '/plain/' + defile
        log.info('Assuming CGit style URL to file: %s', url)

    r = requests.get(url, headers=headers)
    if r.status_code == 401 and gheader:
        log.info('Authorization required using git header in secrets')
        key, val = gheader.split(':', 1)
        headers[key.strip()] = val.strip()
        r = requests.get(url, headers=headers)

    if r.status_code == 200:
        try:
            log.info('New version of project definition found for %s', url)
            data = yaml.safe_load(r.text)
            ProjectDefinition.validate_data(data)
            entry.definition = ProjectDefinition(data)
            # allows us to cache the resp
            headers['If-None-Match'] = r.headers['ETAG']
        except Exception:
            log.exception('Validation failed for %s ...skipping', url)
            return None
    elif r.status_code == 304:
        # it hasn't changed
        log.debug('Cache hit for %s', url)
    else:
        log.error('Unable to read definition(%s): %d: %s', url, r.status_code,
                  r.text)
        return None
    return entry.definition
Example #10
def _get_projdef(entry: PollerEntry) -> Optional[ProjectDefinition]:
    repo = entry.trigger.definition_repo or ""
    defile = entry.trigger.definition_file
    if not defile:
        defile = entry.trigger.project + ".yml"
    gitlab = entry.trigger.secrets.get("gitlabtok")
    gheader = entry.trigger.secrets.get("git.http.extraheader")

    headers = entry.projdef_headers
    token = entry.trigger.secrets.get("githubtok")

    if gitlab:
        headers["PRIVATE-TOKEN"] = gitlab
        p = urlparse(repo)
        proj_enc = quote_plus(p.path[1:].replace(".git", ""))
        url = (p.scheme + "://" + p.netloc + "/api/v4/projects/" + proj_enc +
               "/repository/files/" + quote_plus(defile) + "/raw?ref=master")
    elif "github" in repo:
        if token:
            headers["Authorization"] = "token " + token
        url = repo.replace("github.com", "raw.githubusercontent.com")
        if url[-1] != "/":
            url += "/"
        url += "master/" + defile
    else:
        url = repo
        if not url.endswith(".git"):
            url += ".git"
        url = repo + "/plain/" + defile
        log.info("Assuming CGit style URL to file: %s", url)

    r = requests.get(url, headers=headers)
    if r.status_code == 401 and gheader:
        log.info("Authorization required using git header in secrets")
        key, val = gheader.split(":", 1)
        headers[key.strip()] = val.strip()
        r = requests.get(url, headers=headers)

    if r.status_code == 200:
        try:
            log.info("New version of project definition found for %s", url)
            data = yaml.safe_load(r.text)
            ProjectDefinition.validate_data(data)
            entry.definition = ProjectDefinition(data)
            # allows us to cache the resp
            headers["If-None-Match"] = r.headers["ETAG"]
        except Exception:
            log.exception("Validation failed for %s ...skipping", url)
            return None
    elif r.status_code == 304:
        # it hasn't changed
        log.debug("Cache hit for %s", url)
    else:
        log.error("Unable to read definition(%s): %d: %s", url, r.status_code,
                  r.text)
        return None
    return entry.definition
Example #11
def simulator_validate():
    data = request.get_json()
    if not data:
        raise ApiError(400, "run-definition must be posted as json data")

    try:
        ProjectDefinition.validate_data(data)
    except Exception as e:
        raise ApiError(400, str(e))
    return jsendify({})
Example #12
 def test_bad_script_mutual_exclusion(self):
     with open(os.path.join(self.examples, "python-github.yml")) as f:
         data = yaml.safe_load(f)
         data["triggers"][0]["runs"][0]["script-repo"] = {
             "name": "doesnotexsit",
             "path": "path",
         }
         exp = '"script" and "script-repo" are mutually exclusive'
         with self.assertRaisesRegex(Exception, exp):
             ProjectDefinition.validate_data(data)
Example #13
 def test_bad_script_mutual_exclusion(self):
     with open(os.path.join(self.examples, 'python-github.yml')) as f:
         data = yaml.safe_load(f)
         data['triggers'][0]['runs'][0]['script-repo'] = {
             'name': 'doesnotexist',
             'path': 'path',
         }
         exp = '"script" and "script-repo" are mutually exclusive'
         with self.assertRaisesRegex(Exception, exp):
             ProjectDefinition.validate_data(data)
Example #14
 def test_bad_script_repo(self):
     with open(os.path.join(self.examples, "python-github.yml")) as f:
         data = yaml.safe_load(f)
         del data["triggers"][0]["runs"][0]["script"]
         data["triggers"][0]["runs"][0]["script-repo"] = {
             "name": "doesnotexsit",
             "path": "path",
         }
         exp = "Script repo does not exist"
         with self.assertRaisesRegex(Exception, exp):
             ProjectDefinition.validate_data(data)
Example #15
 def test_bad_script_repo(self):
     with open(os.path.join(self.examples, 'python-github.yml')) as f:
         data = yaml.safe_load(f)
         del data['triggers'][0]['runs'][0]['script']
         data['triggers'][0]['runs'][0]['script-repo'] = {
             'name': 'doesnotexist',
             'path': 'path',
         }
         exp = 'Script repo does not exist'
         with self.assertRaisesRegex(Exception, exp):
             ProjectDefinition.validate_data(data)
Example #16
 def test_recursive_build_trigger(self):
     with open(os.path.join(self.examples, 'python-github.yml')) as f:
         data = yaml.safe_load(f)
         # cause an infinite loop for triggers
         data['triggers'][0]['triggers'] = [
             {
                 'name': 'unit-test'
             },
         ]
         exp = 'Trigger recursion depth exceeded'
         with self.assertRaisesRegex(Exception, exp):
             ProjectDefinition.validate_data(data)
Example #17
 def test_recursive_build_trigger(self):
     with open(os.path.join(self.examples, "python-github.yml")) as f:
         data = yaml.safe_load(f)
         # cause an infinite loop for triggers
         data["triggers"][0]["triggers"] = [
             {
                 "name": "unit-test"
             },
         ]
         exp = "Trigger recursion depth exceeded"
         with self.assertRaisesRegex(Exception, exp):
             ProjectDefinition.validate_data(data)
Example #18
 def test_host_tag_rundef(self):
     with open(os.path.join(self.examples, "host-tag.yml")) as f:
         data = yaml.safe_load(f)
         ProjectDefinition.validate_data(data)
         proj = ProjectDefinition(data)
         dbrun = Mock()
         dbrun.build.project.name = "jobserv"
         dbrun.name = "flake8"
         dbrun.build.build_id = 1
         dbrun.api_key = "123"
         trigger = proj._data["triggers"][0]
         run = trigger["runs"][0]
         rundef = proj.get_run_definition(dbrun, run, trigger, {}, {})
         data = json.loads(rundef)
         self.assertEqual("aarch6*", data["host-tag"])
         self.assertEqual("aarch6*", dbrun.host_tag)
Example #19
def trigger_build(project,
                  reason,
                  trigger_name,
                  params,
                  secrets,
                  proj_def,
                  queue_priority=0):
    proj_def = ProjectDefinition.validate_data(proj_def)
    b = Build.create(project)
    try:
        if reason:
            b.reason = reason
        if trigger_name:
            b.trigger_name = trigger_name
        storage = Storage()
        storage.create_project_definition(
            b, yaml.dump(proj_def._data, default_flow_style=False))
        trigger = proj_def.get_trigger(trigger_name)
        if not trigger:
            raise KeyError("Project(%s) does not have a trigger: %s" %
                           (project, trigger_name))
        if trigger.get("triggers"):
            # there's a trigger to run after all the runs for this trigger
            # completed. it will need to know the parameters for this job
            storage.create_build_params(b, params)
    except Exception as e:
        raise _fail_unexpected(b, e)

    trigger_runs(storage, proj_def, b, trigger, params, secrets, None,
                 queue_priority)
    db.session.commit()
    return b
Example #20
 def test_host_tag_rundef(self):
     with open(os.path.join(self.examples, 'host-tag.yml')) as f:
         data = yaml.safe_load(f)
         ProjectDefinition.validate_data(data)
         proj = ProjectDefinition(data)
         dbrun = Mock()
         dbrun.build.project.name = 'jobserv'
         dbrun.name = 'flake8'
         dbrun.build.build_id = 1
         dbrun.api_key = '123'
         trigger = proj._data['triggers'][0]
         run = trigger['runs'][0]
         rundef = proj.get_run_definition(dbrun, run, trigger, {}, {})
         data = json.loads(rundef)
         self.assertEqual('aarch6*', data['host-tag'])
         self.assertEqual('aarch6*', dbrun.host_tag)
Example #21
 def test_script_repo_rundef(self):
     with open(os.path.join(self.examples, 'python-github.yml')) as f:
         data = yaml.safe_load(f)
         del data['triggers'][0]['runs'][0]['script']
         data['triggers'][0]['runs'][0]['script-repo'] = {
             'name': 'foo',
             'path': 'path/foo.sh',
         }
         data['script-repos'] = {'foo': {'clone-url': 'url'}}
         ProjectDefinition.validate_data(data)
         proj = ProjectDefinition(data)
         dbrun = Mock()
         dbrun.build.project.name = 'jobserv'
         dbrun.name = 'flake8'
         dbrun.build.build_id = 1
         dbrun.api_key = '123'
         trigger = proj._data['triggers'][0]
         run = trigger['runs'][0]
         rundef = proj.get_run_definition(dbrun, run, trigger, {}, {})
         repo = json.loads(rundef).get('script-repo')
         self.assertEqual({'clone-url': 'url', 'path': 'path/foo.sh'}, repo)
Example #22
 def test_script_repo_rundef(self):
     with open(os.path.join(self.examples, "python-github.yml")) as f:
         data = yaml.safe_load(f)
         del data["triggers"][0]["runs"][0]["script"]
         data["triggers"][0]["runs"][0]["script-repo"] = {
             "name": "foo",
             "path": "path/foo.sh",
         }
         data["script-repos"] = {"foo": {"clone-url": "url"}}
         ProjectDefinition.validate_data(data)
         proj = ProjectDefinition(data)
         dbrun = Mock()
         dbrun.build.project.name = "jobserv"
         dbrun.name = "flake8"
         dbrun.build.build_id = 1
         dbrun.api_key = "123"
         trigger = proj._data["triggers"][0]
         run = trigger["runs"][0]
         rundef = proj.get_run_definition(dbrun, run, trigger, {}, {})
         repo = json.loads(rundef).get("script-repo")
         self.assertEqual({"clone-url": "url", "path": "path/foo.sh"}, repo)
Example #23
    def test_script_repo_token(self, url_for):
        url_for.return_value = "blah"
        with open(os.path.join(self.examples, "python-github.yml")) as f:
            data = yaml.safe_load(f)
            del data["triggers"][0]["runs"][0]["script"]
            data["triggers"][0]["runs"][0]["script-repo"] = {
                "name": "foo",
                "path": "path/foo.sh",
            }
            data["script-repos"] = {"foo": {"clone-url": "url", "token": "f"}}
            ProjectDefinition.validate_data(data)
            proj = ProjectDefinition(data)
            dbrun = Mock()
            dbrun.build.project.name = "jobserv"
            dbrun.build.build_id = 1
            dbrun.name = "flake8"
            dbrun.api_key = "secret"
            trigger = proj._data["triggers"][0]
            run = trigger["runs"][0]

            with self.assertRaises(ApiError):
                proj.get_run_definition(dbrun, run, trigger, {}, {})
Example #24
    def test_script_repo_token(self, url_for):
        url_for.return_value = 'blah'
        with open(os.path.join(self.examples, 'python-github.yml')) as f:
            data = yaml.safe_load(f)
            del data['triggers'][0]['runs'][0]['script']
            data['triggers'][0]['runs'][0]['script-repo'] = {
                'name': 'foo',
                'path': 'path/foo.sh',
            }
            data['script-repos'] = {'foo': {'clone-url': 'url', 'token': 'f'}}
            ProjectDefinition.validate_data(data)
            proj = ProjectDefinition(data)
            dbrun = Mock()
            dbrun.build.project.name = 'jobserv'
            dbrun.build.build_id = 1
            dbrun.name = 'flake8'
            dbrun.api_key = 'secret'
            trigger = proj._data['triggers'][0]
            run = trigger['runs'][0]

            with self.assertRaises(ApiError):
                proj.get_run_definition(dbrun, run, trigger, {}, {})
Example #25
    def test_bad_container_auth(self):
        with open(os.path.join(self.examples, 'private-container.yml')) as f:
            data = yaml.safe_load(f)
            proj = ProjectDefinition.validate_data(data)

            dbrun = Mock()
            dbrun.build.project.name = 'jobserv'
            dbrun.build.build_id = 1
            dbrun.name = 'flake8'
            dbrun.api_key = 'secret'
            trigger = proj._data['triggers'][0]
            run = trigger['runs'][0]

            exp = 'not defined in the run\'s secrets'
            with self.assertRaisesRegex(ApiError, exp):
                proj.get_run_definition(dbrun, run, trigger, {}, {})
Example #26
    def test_bad_container_auth(self):
        with open(os.path.join(self.examples, "private-container.yml")) as f:
            data = yaml.safe_load(f)
            proj = ProjectDefinition.validate_data(data)

            dbrun = Mock()
            dbrun.build.project.name = "jobserv"
            dbrun.build.build_id = 1
            dbrun.name = "flake8"
            dbrun.api_key = "secret"
            trigger = proj._data["triggers"][0]
            run = trigger["runs"][0]

            exp = "not defined in the run's secrets"
            with self.assertRaisesRegex(ApiError, exp):
                proj.get_run_definition(dbrun, run, trigger, {}, {})
Example #27
    def test_params(self):
        """Make sure we get project, trigger, and run params"""
        with open(os.path.join(self.examples, "parameters.yml")) as f:
            data = yaml.safe_load(f)
            proj = ProjectDefinition.validate_data(data)

            dbrun = Mock()
            dbrun.build.project.name = "jobserv"
            dbrun.name = "basic"
            dbrun.build.build_id = 1
            dbrun.api_key = "123"
            trigger = proj._data["triggers"][0]
            run = trigger["runs"][0]
            rundef = proj.get_run_definition(dbrun, run, trigger, {}, {})
            data = json.loads(rundef)
            self.assertEqual("GLOBAL", data["env"]["GLOBAL_PARAM"])
            self.assertEqual("RUN", data["env"]["RUN_PARAM"])
            self.assertEqual("TRIGGER", data["env"]["TRIGGER_PARAM"])
Example #28
    def test_params(self):
        """Make sure we get project, trigger, and run params"""
        with open(os.path.join(self.examples, 'parameters.yml')) as f:
            data = yaml.safe_load(f)
            proj = ProjectDefinition.validate_data(data)

            dbrun = Mock()
            dbrun.build.project.name = 'jobserv'
            dbrun.name = 'basic'
            dbrun.build.build_id = 1
            dbrun.api_key = '123'
            trigger = proj._data['triggers'][0]
            run = trigger['runs'][0]
            rundef = proj.get_run_definition(dbrun, run, trigger, {}, {})
            data = json.loads(rundef)
            self.assertEqual('GLOBAL', data['env']['GLOBAL_PARAM'])
            self.assertEqual('RUN', data['env']['RUN_PARAM'])
            self.assertEqual('TRIGGER', data['env']['TRIGGER_PARAM'])
Example #29
    def test_loop_on(self):
        with open(os.path.join(self.examples, 'python-github.yml')) as f:
            data = yaml.safe_load(f)
            run = {
                'name': 'compile-{loop}',
                'container': 'foo',
                'host-tag': 'amd64',
                'script': 'unit-test',
                'loop-on': [
                    {
                        'param': 'BOARD',
                        'values': ['carbon', 'nitrogen']
                    },
                    {
                        'param': 'ZEPHYR',
                        'values': ['upstream', 'dev', 'test']
                    },
                    {
                        'param': 'COMPILER',
                        'values': ['gcc', 'llvm']
                    },
                ],
                'triggers': [
                    {
                        'name': 'trigger',
                        'run-names': '{name}-{loop}'
                    },
                ]
            }
            data['triggers'][0]['runs'].insert(1, run)
            data['triggers'].append({
                'type': 'simple',
                'name': 'trigger',
                'runs': [{
                    'name': 'trigger',
                    'container': 'foo',
                    'host-tag': 'amd64',
                    'script': 'flake8',
                }]
            })
            ProjectDefinition.validate_data(data)
            runs = ProjectDefinition(data)._data['triggers'][0]['runs']

            # we should have 2 + (len(BOARD) * len(ZEPHYR) * len(COMPILER))
            self.assertEqual(14, len(runs))

            # they should be inserted in between the original runs in a
            # predictable order
            self.assertEqual('unit-test', runs[0]['name'])

            self.assertEqual('compile-carbon-upstream-gcc', runs[1]['name'])
            self.assertEqual('compile-carbon-upstream-llvm', runs[2]['name'])
            self.assertEqual('compile-carbon-dev-gcc', runs[3]['name'])
            self.assertEqual('compile-carbon-dev-llvm', runs[4]['name'])
            self.assertEqual('compile-carbon-test-gcc', runs[5]['name'])
            self.assertEqual('compile-carbon-test-llvm', runs[6]['name'])
            self.assertEqual('compile-nitrogen-upstream-gcc', runs[7]['name'])
            self.assertEqual('compile-nitrogen-upstream-llvm', runs[8]['name'])
            self.assertEqual('compile-nitrogen-dev-gcc', runs[9]['name'])
            self.assertEqual('compile-nitrogen-dev-llvm', runs[10]['name'])
            self.assertEqual('compile-nitrogen-test-gcc', runs[11]['name'])
            self.assertEqual('compile-nitrogen-test-llvm', runs[12]['name'])

            self.assertEqual('flake8', runs[13]['name'])
Example #30
    def test_loop_on(self):
        with open(os.path.join(self.examples, "python-github.yml")) as f:
            data = yaml.safe_load(f)
            run = {
                "name": "compile-{loop}",
                "container": "foo",
                "host-tag": "amd64",
                "script": "unit-test",
                "loop-on": [
                    {
                        "param": "BOARD",
                        "values": ["carbon", "nitrogen"]
                    },
                    {
                        "param": "ZEPHYR",
                        "values": ["upstream", "dev", "test"]
                    },
                    {
                        "param": "COMPILER",
                        "values": ["gcc", "llvm"]
                    },
                ],
                "triggers": [
                    {
                        "name": "trigger",
                        "run-names": "{name}-{loop}"
                    },
                ],
            }
            data["triggers"][0]["runs"].insert(1, run)
            data["triggers"].append({
                "type":
                "simple",
                "name":
                "trigger",
                "runs": [{
                    "name": "trigger",
                    "container": "foo",
                    "host-tag": "amd64",
                    "script": "flake8",
                }],
            })
            ProjectDefinition.validate_data(data)
            runs = ProjectDefinition(data)._data["triggers"][0]["runs"]

            # we should have 2 + (len(BOARD) * len(ZEPHYR) * len(COMPILER))
            self.assertEqual(14, len(runs))

            # they should be inserted in between the original runs in a
            # predictable order
            self.assertEqual("unit-test", runs[0]["name"])

            self.assertEqual("compile-carbon-upstream-gcc", runs[1]["name"])
            self.assertEqual("compile-carbon-upstream-llvm", runs[2]["name"])
            self.assertEqual("compile-carbon-dev-gcc", runs[3]["name"])
            self.assertEqual("compile-carbon-dev-llvm", runs[4]["name"])
            self.assertEqual("compile-carbon-test-gcc", runs[5]["name"])
            self.assertEqual("compile-carbon-test-llvm", runs[6]["name"])
            self.assertEqual("compile-nitrogen-upstream-gcc", runs[7]["name"])
            self.assertEqual("compile-nitrogen-upstream-llvm", runs[8]["name"])
            self.assertEqual("compile-nitrogen-dev-gcc", runs[9]["name"])
            self.assertEqual("compile-nitrogen-dev-llvm", runs[10]["name"])
            self.assertEqual("compile-nitrogen-test-gcc", runs[11]["name"])
            self.assertEqual("compile-nitrogen-test-llvm", runs[12]["name"])

            self.assertEqual("flake8", runs[13]["name"])