Example #1
    def execute(self, options, jjb_config):
        builder = JenkinsManager(jjb_config)

        if options.del_jobs and options.del_views:
            raise JenkinsJobsException(
                '"--views-only" and "--jobs-only" cannot be used together.')

        fn = options.path
        registry = ModuleRegistry(jjb_config, builder.plugins_list)
        parser = YamlParser(jjb_config)

        if fn:
            parser.load_files(fn)
            parser.expandYaml(registry, options.name)
            jobs = [j['name'] for j in parser.jobs]
            views = [v['name'] for v in parser.views]
        else:
            jobs = options.name
            views = options.name

        if options.del_jobs:
            builder.delete_jobs(jobs)
        elif options.del_views:
            builder.delete_views(views)
        else:
            builder.delete_jobs(jobs)
            builder.delete_views(views)
Example #2
    def execute(self, options, jjb_config):
        builder = JenkinsManager(jjb_config)

        if options.del_jobs and options.del_views:
            raise JenkinsJobsException(
                '"--views-only" and "--jobs-only" cannot be used together.')

        fn = options.path
        registry = ModuleRegistry(jjb_config, builder.plugins_list)
        parser = YamlParser(jjb_config)

        if fn:
            parser.load_files(fn)
            parser.expandYaml(registry, options.name)
            jobs = [j["name"] for j in parser.jobs]
            views = [v["name"] for v in parser.views]
        else:
            jobs = options.name
            views = options.name

        if options.del_jobs:
            builder.delete_jobs(jobs)
        elif options.del_views:
            builder.delete_views(views)
        else:
            builder.delete_jobs(jobs)
            builder.delete_views(views)
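
The execute() methods in Examples #1 and #2 expect an argparse-style "options"
object carrying path, name, del_jobs and del_views, plus a validated JJBConfig.
A minimal sketch of building those inputs outside the normal CLI, assuming the
standard jenkins_jobs imports; the ini path, YAML directory and name glob are
hypothetical placeholders:

# Sketch only: the config file, YAML directory and glob are made up.
from argparse import Namespace

from jenkins_jobs.config import JJBConfig

jjb_config = JJBConfig("jenkins_jobs.ini")  # hypothetical ini with [jenkins] credentials
jjb_config.validate()

options = Namespace(
    path=["jobs/"],         # YAML sources; if empty, names are treated as literal job names
    name=["my-project-*"],  # job/view name globs passed to expandYaml()
    del_jobs=True,          # mirrors --jobs-only
    del_views=False,        # mirrors --views-only
)
# options and jjb_config can then be handed to the subcommand's execute().
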
Example #3
    def _generate_xmljobs(self, options, jjb_config=None):
        builder = Builder(jjb_config)

        logger.info("Updating jobs in {0} ({1})".format(
            options.path, options.names))
        orig = time.time()

        # Generate XML
        parser = YamlParser(jjb_config)
        registry = ModuleRegistry(jjb_config, builder.plugins_list)
        xml_generator = XmlJobGenerator(registry)

        parser.load_files(options.path)
        registry.set_parser_data(parser.data)

        job_data_list = parser.expandYaml(registry, options.names)

        xml_jobs = xml_generator.generateXML(job_data_list)

        jobs = parser.jobs
        step = time.time()
        logging.debug('%d XML files generated in %ss',
                      len(jobs), str(step - orig))

        return builder, xml_jobs
Example #4
    def _generate_xmljobs(self, options, jjb_config=None):
        builder = JenkinsManager(jjb_config)

        logger.info("Updating jobs in {0} ({1})".format(
            options.path, options.names))
        orig = time.time()

        # Generate XML
        parser = YamlParser(jjb_config)
        registry = ModuleRegistry(jjb_config, builder.plugins_list)
        xml_job_generator = XmlJobGenerator(registry)
        xml_view_generator = XmlViewGenerator(registry)

        parser.load_files(options.path)
        registry.set_parser_data(parser.data)

        job_data_list, view_data_list = parser.expandYaml(
            registry, options.names)

        xml_jobs = xml_job_generator.generateXML(job_data_list)
        xml_views = xml_view_generator.generateXML(view_data_list)

        jobs = parser.jobs
        step = time.time()
        logging.debug('%d XML files generated in %ss',
                      len(jobs), str(step - orig))

        return builder, xml_jobs, xml_views
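
Examples #3 and #4 show the same helper before and after views support: the
newer expandYaml() returns a (job_data_list, view_data_list) pair, and jobs and
views go through separate XML generators. A stripped-down sketch of that
parse-to-XML pipeline on its own, assuming current jenkins_jobs module paths
and an empty plugins list so no live Jenkins is contacted; the YAML path is a
placeholder:

# Minimal parse-to-XML sketch; the YAML file path is hypothetical.
from jenkins_jobs.config import JJBConfig
from jenkins_jobs.parser import YamlParser
from jenkins_jobs.registry import ModuleRegistry
from jenkins_jobs.xml_config import XmlJobGenerator

jjb_config = JJBConfig()
jjb_config.builder["plugins_info"] = []  # skip querying a live Jenkins for plugin data
jjb_config.validate()

parser = YamlParser(jjb_config)
registry = ModuleRegistry(jjb_config, jjb_config.builder["plugins_info"])

parser.load_files(["jobs/example.yaml"])
registry.set_parser_data(parser.data)

job_data_list, view_data_list = parser.expandYaml(registry, [])
xml_jobs = XmlJobGenerator(registry).generateXML(job_data_list)

for xml_job in xml_jobs:
    print(xml_job.name)
    print(xml_job.output().decode("utf-8"))  # serialized config.xml
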
Example #5
    def generate_jjb_xml(self):
        """render jjb yaml to xml"""

        jjb_config = self.get_jjb_config()
        options_names = []  # normally a list of jobs globs for targeting
        files_path = glob.glob("./**/", recursive=True)

        parser = YamlParser(jjb_config)
        registry = ModuleRegistry(jjb_config, self.plugins_list)

        xml_job_generator = XmlJobGeneratorWithRaw(registry)
        xml_view_generator = XmlViewGenerator(registry)

        parser.load_files(files_path)
        registry.set_parser_data(parser.data)

        job_data_list, view_data_list = parser.expandYaml(
            registry, options_names)

        def job_data_filter_wrapper(job_data):
            return self._jobs_filter_func(job_data["name"])

        xml_jobs = xml_job_generator.generateXML(
            filter(job_data_filter_wrapper, job_data_list))
        jobs = self.jobs
        for xml_job in xml_jobs:
            formatted_xml_str = self.xml_dump(xml_job.xml)
            jobs[xml_job.name].after_xml = formatted_xml_str

        xml_views = xml_view_generator.generateXML(
            filter(job_data_filter_wrapper, view_data_list))
        views = self.views
        for xml_view in xml_views:
            views[xml_view.name].after_xml = self.xml_dump(xml_view.xml)
Example #6
    def import_missing(self) -> list:
        """import missing jobs as xml"""
        missing = [
            item for item in self.jobs.values() if item.changetype() is DELETE
        ]
        if not missing:
            return []

        class FakeRegistry:
            modules = []

        def job_name_to_file_name(j_name):
            _part = re.sub(r"[\/]", "_", j_name)
            return f"./{_part}.xml"

        xml_job_name_pairs = []

        if os.path.exists(self.raw_xml_yaml_path):
            parser = YamlParser(self.get_jjb_config())
            parser.load_files([self.raw_xml_yaml_path])
            job_data_list, _ = parser.expandYaml(FakeRegistry, [])
            for job_data in job_data_list:
                name = job_data["name"]
                fname = job_name_to_file_name(name)
                assert os.path.exists(fname)
                xml_job_name_pairs.append((name, fname))
        template = jinja2.Template(
            """\
---
{% for job_name, file_name in raw_xml_jobs -%}
- job:
   name: {{ job_name |tojson }}
   project-type: raw
   raw: !include-raw: {{ file_name }}

{% endfor -%}
""",
            undefined=jinja2.StrictUndefined,
        )

        for mxml in missing:
            job_name = mxml.name
            file_name = job_name_to_file_name(job_name)
            job_config = mxml.before_xml
            xml_job_name_pairs.append((job_name, file_name))
            assert not os.path.exists(file_name)
            with open(file_name, "w") as fp:
                fp.write(job_config)
            log.info("Imported %s to %s", job_name, file_name)

        with open(self.raw_xml_yaml_path, "w") as fp:
            template.stream(raw_xml_jobs=xml_job_name_pairs).dump(fp)
        return missing
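
The inline Jinja2 template above turns each (job_name, file_name) pair back
into a minimal raw job definition that pulls the exported XML in through
!include-raw. A small standalone sketch of what that rendering produces, with
a made-up job name and file name:

# Rendering the same template with sample data; the names are hypothetical.
import jinja2

template = jinja2.Template(
    """\
---
{% for job_name, file_name in raw_xml_jobs -%}
- job:
   name: {{ job_name |tojson }}
   project-type: raw
   raw: !include-raw: {{ file_name }}

{% endfor -%}
""",
    undefined=jinja2.StrictUndefined,
)

print(template.render(raw_xml_jobs=[("team/app-build", "./team_app-build.xml")]))
# prints roughly:
# ---
# - job:
#    name: "team/app-build"
#    project-type: raw
#    raw: !include-raw: ./team_app-build.xml
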
Example #7
    def get_jobs(self, wd, name):
        ini_path = "{}/{}.ini".format(wd, name)
        config_path = "{}/config.yaml".format(wd)

        args = ["--conf", ini_path, "test", config_path]
        jjb = self.get_jjb(args)
        builder = JenkinsManager(jjb.jjb_config)
        registry = ModuleRegistry(jjb.jjb_config, builder.plugins_list)
        parser = YamlParser(jjb.jjb_config)
        parser.load_files(jjb.options.path)
        jobs, _ = parser.expandYaml(registry, jjb.options.names)

        return jobs
Example #8
    def test_retain_anchors_enabled(self):
        """
        Verify that anchors are retained across files if retain_anchors is
        enabled in the config.
        """

        files = ["custom_retain_anchors_include001.yaml",
                 "custom_retain_anchors.yaml"]

        jjb_config = JJBConfig()
        jjb_config.yamlparser['retain_anchors'] = True
        jjb_config.validate()
        j = YamlParser(jjb_config)
        j.load_files([os.path.join(self.fixtures_path, f) for f in files])
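
What retain_anchors changes is that a YAML anchor defined in one loaded file
stays visible to aliases in files loaded later. A self-contained sketch of that
behaviour, with two throwaway files standing in for the custom_retain_anchors_*
fixtures (whose actual contents are not shown here):

# Demonstration sketch; the YAML below is hypothetical stand-in content.
import os
import tempfile

from jenkins_jobs.config import JJBConfig
from jenkins_jobs.parser import YamlParser

tmp = tempfile.mkdtemp()
first = os.path.join(tmp, "defaults.yaml")
second = os.path.join(tmp, "job.yaml")

with open(first, "w") as f:
    f.write("- defaults:\n    name: global\n    node: &build_node builder-01\n")
with open(second, "w") as f:
    f.write("- job:\n    name: uses-anchor\n    node: *build_node\n")

jjb_config = JJBConfig()
jjb_config.yamlparser["retain_anchors"] = True  # the alias in job.yaml needs this
jjb_config.validate()

YamlParser(jjb_config).load_files([first, second])  # parses cleanly

With the option left at its default, the same load_files() call raises
yaml.composer.ComposerError ("found undefined alias"), which is exactly what
Example #11 below asserts.
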
Example #9
    def execute(self, options, jjb_config):
        builder = JenkinsManager(jjb_config)

        fn = options.path
        registry = ModuleRegistry(jjb_config, builder.plugins_list)
        parser = YamlParser(jjb_config)

        if fn:
            parser.load_files(fn)
            parser.expandYaml(registry, options.name)
            jobs = [j["name"] for j in parser.jobs]
        else:
            jobs = options.name

        builder.delete_jobs(jobs)
Example #10
    def execute(self, options, jjb_config):
        builder = JenkinsManager(jjb_config)

        fn = options.path
        registry = ModuleRegistry(jjb_config, builder.plugins_list)
        parser = YamlParser(jjb_config)

        if fn:
            parser.load_files(fn)
            parser.expandYaml(registry, options.name)
            jobs = [j['name'] for j in parser.jobs]
        else:
            jobs = options.name

        builder.delete_jobs(jobs)
Example #11
    def test_retain_anchors_default(self):
        """
        Verify that anchors are NOT retained across files by default.
        """

        files = ["custom_retain_anchors_include001.yaml",
                 "custom_retain_anchors.yaml"]

        jjb_config = JJBConfig()
        # use the default value for retain_anchors
        jjb_config.validate()
        j = YamlParser(jjb_config)
        with ExpectedException(yaml.composer.ComposerError,
                               "found undefined alias.*"):
            j.load_files([os.path.join(self.fixtures_path, f) for f in files])
Example #12
    def execute(self, options, jjb_config):
        builder = Builder(jjb_config)

        fn = options.path

        registry = ModuleRegistry(jjb_config, builder.plugins_list)
        for jobs_glob in options.name:
            parser = YamlParser(jjb_config)

            if fn:
                parser.load_files(fn)
                parser.expandYaml(registry, [jobs_glob])
                jobs = [j['name'] for j in parser.jobs]
            else:
                jobs = [jobs_glob]

            builder.delete_jobs(jobs)
Example #13
    def _generate_xmljobs(self, options, jjb_config=None):
        builder = Builder(jjb_config)

        logger.info("Updating jobs in {0} ({1})".format(options.path, options.names))
        orig = time.time()

        # Generate XML
        parser = YamlParser(jjb_config, builder.plugins_list)
        parser.load_files(options.path)
        parser.expandYaml(options.names)
        parser.generateXML()

        jobs = parser.jobs
        step = time.time()
        logging.debug("%d XML files generated in %ss", len(jobs), str(step - orig))

        return builder, parser.xml_jobs
Example #14
    def execute(self, options, jjb_config):
        builder = Builder(jjb_config)

        fn = options.path

        registry = ModuleRegistry(jjb_config, builder.plugins_list)
        for jobs_glob in options.name:
            parser = YamlParser(jjb_config)

            if fn:
                parser.load_files(fn)
                parser.expandYaml(registry, [jobs_glob])
                jobs = [j['name'] for j in parser.jobs]
            else:
                jobs = [jobs_glob]

            builder.delete_job(jobs)
Example #15
    def test_multiple_same_anchor_in_multiple_toplevel_yaml(self):
        """
        Verify that anchors/aliases only span use of '!include' tag

        To ensure that any yaml loaded by the include tag is in the same
        space as the top level file, but individual top level yaml definitions
        are treated by the yaml loader as independent.
        """

        files = ["custom_same_anchor-001-part1.yaml",
                 "custom_same_anchor-001-part2.yaml"]

        jjb_config = JJBConfig()
        jjb_config.jenkins['url'] = 'http://example.com'
        jjb_config.jenkins['user'] = '******'
        jjb_config.jenkins['password'] = '******'
        jjb_config.builder['plugins_info'] = []
        jjb_config.validate()
        j = YamlParser(jjb_config)
        j.load_files([os.path.join(self.fixtures_path, f) for f in files])
Example #16
    def test_retain_anchors_enabled_j2_yaml(self):
        """
        Verify that anchors are retained across files and are properly resolved when using !j2-yaml.
        """

        files = [
            "custom_retain_anchors_j2_yaml_include001.yaml",
            "custom_retain_anchors_j2_yaml.yaml",
        ]

        jjb_config = JJBConfig()
        jjb_config.yamlparser["retain_anchors"] = True
        jjb_config.validate()
        j = YamlParser(jjb_config)
        j.load_files([os.path.join(self.fixtures_path, f) for f in files])

        registry = ModuleRegistry(jjb_config, None)
        jobs, _ = j.expandYaml(registry)
        self.assertEqual(jobs[0]["builders"][0]["shell"],
                         "docker run ubuntu:latest")
Example #17
def assert_case(case_name):
    case_source, case_result = (os.path.join(BASE_PATH, case_name + ext) for ext in ['.yml', '.xml'])
    jjb_config = JJBConfig()
    builder = Builder(jjb_config)

    # Generate XML
    parser = YamlParser(jjb_config)
    registry = ModuleRegistry(jjb_config, builder.plugins_list)
    xml_generator = XmlJobGenerator(registry)

    parser.load_files(case_source)
    registry.set_parser_data(parser.data)

    job_data_list = parser.expandYaml(registry, [])

    xml_jobs = xml_generator.generateXML(job_data_list)

    result_xml = ET.XML(xml_jobs[0].output())
    expected_xml = ET.XML(open(case_result).read())
    assert ET.tostring(result_xml) == ET.tostring(expected_xml)
Example #18
    def import_missing(self) -> list:
        """import missing jobs as xml"""
        missing = [
            item for item in self.jobs.values() if item.changetype() is DELETE
        ]
        if not missing:
            return []

        class FakeRegistry:
            modules = []

        def job_name_to_file_name(j_name):
            _part = re.sub(r"[\/]", "_", j_name)
            return f"./{_part}.xml"

        xml_job_name_pairs = []

        if os.path.exists(self.raw_xml_yaml_path):
            parser = YamlParser(self.get_jjb_config())
            parser.load_files([self.raw_xml_yaml_path])
            job_data_list, _ = parser.expandYaml(FakeRegistry, [])
            for job_data in job_data_list:
                name = job_data["name"]
                fname = job_name_to_file_name(name)
                assert os.path.exists(fname)
                xml_job_name_pairs.append((name, fname))
        template = self.jenv.get_template("raw_xml_import.j2")

        for mxml in missing:
            job_name = mxml.name
            file_name = job_name_to_file_name(job_name)
            job_config = mxml.before_xml
            xml_job_name_pairs.append((job_name, file_name))
            assert not os.path.exists(file_name)
            with open(file_name, "w") as fp:
                fp.write(job_config)
            log.info("Imported %s to %s", job_name, file_name)

        with open(self.raw_xml_yaml_path, "w") as fp:
            template.stream(raw_xml_jobs=xml_job_name_pairs).dump(fp)
        return missing
Example #19
def assert_case(case_name):
    case_source, case_result = (os.path.join(BASE_PATH, case_name + ext)
                                for ext in ['.yml', '.xml'])
    jjb_config = JJBConfig()
    builder = Builder(jjb_config)

    # Generate XML
    parser = YamlParser(jjb_config)
    registry = ModuleRegistry(jjb_config, builder.plugins_list)
    xml_generator = XmlJobGenerator(registry)

    parser.load_files(case_source)
    registry.set_parser_data(parser.data)

    job_data_list = parser.expandYaml(registry, [])

    xml_jobs = xml_generator.generateXML(job_data_list)

    result_xml = ET.XML(xml_jobs[0].output())
    expected_xml = ET.XML(open(case_result).read())
    assert ET.tostring(result_xml) == ET.tostring(expected_xml)
Example #20
    def get_job_webhooks_data(self):
        job_webhooks_data = {}
        for name, wd in self.working_dirs.items():
            ini_path = '{}/{}.ini'.format(wd, name)
            config_path = '{}/config.yaml'.format(wd)

            args = ['--conf', ini_path, 'test', config_path]
            jjb = self.get_jjb(args)
            builder = JenkinsManager(jjb.jjb_config)
            registry = ModuleRegistry(jjb.jjb_config, builder.plugins_list)
            parser = YamlParser(jjb.jjb_config)
            parser.load_files(jjb.options.path)

            jobs, _ = parser.expandYaml(registry, jjb.options.names)

            for job in jobs:
                try:
                    project_url_raw = job['properties'][0]['github']['url']
                    if 'https://github.com' in project_url_raw:
                        continue
                    job_url = \
                        '{}/project/{}'.format(self.instances[name],
                                               job['name'])
                    project_url = \
                        project_url_raw.strip('/').replace('.git', '')
                    gitlab_triggers = job['triggers'][0]['gitlab']
                    mr_trigger = gitlab_triggers['trigger-merge-request']
                    trigger = 'mr' if mr_trigger else 'push'
                    hook = {
                        'job_url': job_url,
                        'trigger': trigger,
                    }
                    job_webhooks_data.setdefault(project_url, [])
                    job_webhooks_data[project_url].append(hook)
                except KeyError:
                    continue
        return job_webhooks_data
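
The webhook extraction above depends on each expanded job dict exposing its
github property URL and its gitlab trigger settings; any job missing one of
those keys is skipped by the KeyError handler. A hedged sketch of the shape
being read, with made-up values:

# Hypothetical expanded job data illustrating the keys accessed above.
job = {
    "name": "app-pr-check",
    "properties": [
        {"github": {"url": "https://gitlab.example.com/team/app/"}},
    ],
    "triggers": [
        {"gitlab": {"trigger-merge-request": True}},
    ],
}

project_url = job["properties"][0]["github"]["url"].strip("/").replace(".git", "")
trigger = "mr" if job["triggers"][0]["gitlab"]["trigger-merge-request"] else "push"
# project_url -> "https://gitlab.example.com/team/app", trigger -> "mr"
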
Example #21
class Builder(object):
    def __init__(self, jjb_config):
        self.jenkins = Jenkins(jjb_config.jenkins['url'],
                               jjb_config.jenkins['user'],
                               jjb_config.jenkins['password'],
                               jjb_config.jenkins['timeout'])
        self.cache = CacheStorage(jjb_config.jenkins['url'],
                                  flush=jjb_config.builder['flush_cache'])
        self._plugins_list = jjb_config.builder['plugins_info']

        self.jjb_config = jjb_config

    @property
    def plugins_list(self):
        if self._plugins_list is None:
            self._plugins_list = self.jenkins.get_plugins_info()
        return self._plugins_list

    def delete_old_managed(self, keep=None):
        jobs = self.jenkins.get_jobs()
        deleted_jobs = 0
        if keep is None:
            keep = [job.name for job in self.parser.xml_jobs]
        for job in jobs:
            if job['name'] not in keep:
                if self.jenkins.is_managed(job['name']):
                    logger.info("Removing obsolete jenkins job {0}"
                                .format(job['name']))
                    self.delete_job(job['name'])
                    deleted_jobs += 1
                else:
                    logger.info("Not deleting unmanaged jenkins job %s",
                                job['name'])
            else:
                logger.debug("Keeping job %s", job['name'])
        return deleted_jobs

    def delete_job(self, jobs_glob, fn=None):
        self.parser = YamlParser(self.jjb_config, self.plugins_list)

        if fn:
            self.parser.load_files(fn)
            self.parser.expandYaml([jobs_glob])
            jobs = [j['name'] for j in self.parser.jobs]
        else:
            jobs = [jobs_glob]

        if jobs is not None:
            logger.info("Removing jenkins job(s): %s" % ", ".join(jobs))
        for job in jobs:
            self.jenkins.delete_job(job)
            if self.cache.is_cached(job):
                self.cache.set(job, '')
        self.cache.save()

    def delete_all_jobs(self):
        jobs = self.jenkins.get_jobs()
        logger.info("Number of jobs to delete:  %d", len(jobs))
        self.jenkins.delete_all_jobs()
        # Need to clear the JJB cache after deletion
        self.cache.clear()

    @parallelize
    def changed(self, job):
        md5 = job.md5()

        changed = (self.jjb_config.builder['ignore_cache'] or
                   self.cache.has_changed(job.name, md5))
        if not changed:
            logger.debug("'{0}' has not changed".format(job.name))
        return changed

    def update_jobs(self, input_fn, jobs_glob=None, output=None,
                    n_workers=None):
        orig = time.time()

        self.parser = YamlParser(self.jjb_config, self.plugins_list)
        self.parser.load_files(input_fn)

        self.parser.expandYaml(jobs_glob)
        self.parser.generateXML()
        step = time.time()
        logging.debug('%d XML files generated in %ss',
                      len(self.parser.jobs), str(step - orig))

        logger.info("Number of jobs generated:  %d", len(self.parser.xml_jobs))
        self.parser.xml_jobs.sort(key=operator.attrgetter('name'))

        if (output and not hasattr(output, 'write')
                and not os.path.isdir(output)):
            logger.info("Creating directory %s" % output)
            try:
                os.makedirs(output)
            except OSError:
                if not os.path.isdir(output):
                    raise

        if output:
            # ensure only wrapped once
            if hasattr(output, 'write'):
                output = utils.wrap_stream(output)

            for job in self.parser.xml_jobs:
                if hasattr(output, 'write'):
                    # `output` is a file-like object
                    logger.info("Job name:  %s", job.name)
                    logger.debug("Writing XML to '{0}'".format(output))
                    try:
                        output.write(job.output())
                    except IOError as exc:
                        if exc.errno == errno.EPIPE:
                            # EPIPE could happen if piping output to something
                            # that doesn't read the whole input (e.g.: the UNIX
                            # `head` command)
                            return
                        raise
                    continue

                output_fn = os.path.join(output, job.name)
                logger.debug("Writing XML to '{0}'".format(output_fn))
                with io.open(output_fn, 'w', encoding='utf-8') as f:
                    f.write(job.output().decode('utf-8'))
            return self.parser.xml_jobs, len(self.parser.xml_jobs)

        # Filter out the jobs that did not change
        logging.debug('Filtering %d jobs for changed jobs',
                      len(self.parser.xml_jobs))
        step = time.time()
        jobs = [job for job in self.parser.xml_jobs
                if self.changed(job)]
        logging.debug("Filtered for changed jobs in %ss",
                      (time.time() - step))

        if not jobs:
            return [], 0

        # Update the jobs
        logging.debug('Updating jobs')
        step = time.time()
        p_params = [{'job': job} for job in jobs]
        results = self.parallel_update_job(
            n_workers=n_workers,
            parallelize=p_params)
        logging.debug("Parsing results")
        # generalize the result parsing, as a parallelized job always returns a
        # list
        if len(p_params) in (1, 0):
            results = [results]
        for result in results:
            if isinstance(result, Exception):
                raise result
            else:
                # update in-memory cache
                j_name, j_md5 = result
                self.cache.set(j_name, j_md5)
        # write cache to disk
        self.cache.save()
        logging.debug("Updated %d jobs in %ss",
                      len(jobs),
                      time.time() - step)
        logging.debug("Total run took %ss", (time.time() - orig))
        return jobs, len(jobs)

    @parallelize
    def parallel_update_job(self, job):
        self.jenkins.update_job(job.name, job.output().decode('utf-8'))
        return (job.name, job.md5())

    def update_job(self, input_fn, jobs_glob=None, output=None):
        logging.warning('Current update_job function signature is deprecated '
                        'and will change in future versions to the signature '
                        'of the new parallel_update_job')
        return self.update_jobs(input_fn, jobs_glob, output)
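
A short usage sketch for this legacy Builder class, assuming the jenkins_jobs
imports used elsewhere in these examples; the URL, credentials and jobs/
directory are placeholders:

# Usage sketch for the Builder API shown above; all values are placeholders.
from jenkins_jobs.config import JJBConfig

jjb_config = JJBConfig()
jjb_config.jenkins['url'] = 'http://jenkins.example.com'
jjb_config.jenkins['user'] = 'ci-user'
jjb_config.jenkins['password'] = 'api-token'
jjb_config.builder['plugins_info'] = []  # skip the live plugin query
jjb_config.validate()

builder = Builder(jjb_config)

# Generate and upload every job defined under jobs/, then prune managed jobs
# that are no longer defined (keep defaults to the jobs just generated).
builder.update_jobs(['jobs/'])
builder.delete_old_managed()
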