Example #1
    def test_yaml_snippet(self):
        config = self._get_config()

        expected_xml = self._read_utf8_content()

        parser = YamlParser(config)
        parser.parse(self.in_filename)

        registry = ModuleRegistry(config)
        registry.set_parser_data(parser.data)
        job_data_list = parser.expandYaml(registry)

        # Generate the XML tree
        xml_generator = XmlJobGenerator(registry)
        xml_jobs = xml_generator.generateXML(job_data_list)

        xml_jobs.sort(key=operator.attrgetter('name'))

        # Prettify generated XML
        pretty_xml = u"\n".join(job.output().decode('utf-8')
                                for job in xml_jobs)

        self.assertThat(
            pretty_xml,
            testtools.matchers.DocTestMatches(expected_xml,
                                              doctest.ELLIPSIS |
                                              doctest.REPORT_NDIFF)
        )
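A minimal, self-contained sketch of the same pipeline outside the test
harness, assuming the jenkins-job-builder 2.x API used above (where
expandYaml() returns only job data; later releases return a
(jobs, views) tuple). The input file name is a placeholder:

    from jenkins_jobs.config import JJBConfig
    from jenkins_jobs.parser import YamlParser
    from jenkins_jobs.registry import ModuleRegistry
    from jenkins_jobs.xml_config import XmlJobGenerator

    jjb_config = JJBConfig()
    jjb_config.validate()

    parser = YamlParser(jjb_config)
    parser.parse("jobs.yaml")  # placeholder input file

    registry = ModuleRegistry(jjb_config)
    registry.set_parser_data(parser.data)

    job_data_list = parser.expandYaml(registry)
    xml_jobs = XmlJobGenerator(registry).generateXML(job_data_list)
    for job in xml_jobs:
        print(job.output().decode("utf-8"))  # one XML document per job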
Example #2
    def _generate_xmljobs(self, options, jjb_config=None):
        builder = JenkinsManager(jjb_config)

        logger.info("Updating jobs in {0} ({1})".format(
            options.path, options.names))
        orig = time.time()

        # Generate XML
        parser = YamlParser(jjb_config)
        registry = ModuleRegistry(jjb_config, builder.plugins_list)
        xml_job_generator = XmlJobGenerator(registry)
        xml_view_generator = XmlViewGenerator(registry)

        parser.load_files(options.path)
        registry.set_parser_data(parser.data)

        job_data_list, view_data_list = parser.expandYaml(
            registry, options.names)

        xml_jobs = xml_job_generator.generateXML(job_data_list)
        xml_views = xml_view_generator.generateXML(view_data_list)

        jobs = parser.jobs
        step = time.time()
        logging.debug('%d XML files generated in %ss',
                      len(jobs), str(step - orig))

        return builder, xml_jobs, xml_views
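The options argument above is only read for its path and names
attributes; a hypothetical stand-in built with argparse.Namespace
(the attribute values are placeholders):

    from argparse import Namespace

    options = Namespace(path=["jobs/"], names=["release-*"])
    # builder, xml_jobs, xml_views = self._generate_xmljobs(options, jjb_config)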
Example #3
    def test_yaml_snippet(self):
        config = self._get_config()

        expected_xml = self._read_utf8_content()

        parser = YamlParser(config)
        parser.parse(self.in_filename)

        registry = ModuleRegistry(config)
        registry.set_parser_data(parser.data)
        job_data_list, view_data_list = parser.expandYaml(registry)

        # Generate the XML tree
        xml_generator = XmlJobGenerator(registry)
        xml_jobs = xml_generator.generateXML(job_data_list)

        xml_jobs.sort(key=AlphanumSort)

        # Prettify generated XML
        pretty_xml = u"\n".join(job.output().decode('utf-8')
                                for job in xml_jobs)

        self.assertThat(
            pretty_xml,
            testtools.matchers.DocTestMatches(
                expected_xml, doctest.ELLIPSIS | doctest.REPORT_NDIFF))
Example #4
    def execute(self, options, jjb_config):
        builder = JenkinsManager(jjb_config)

        if options.del_jobs and options.del_views:
            raise JenkinsJobsException(
                '"--views-only" and "--jobs-only" cannot be used together.')

        fn = options.path
        registry = ModuleRegistry(jjb_config, builder.plugins_list)
        parser = YamlParser(jjb_config)

        if fn:
            parser.load_files(fn)
            parser.expandYaml(registry, options.name)
            jobs = [j['name'] for j in parser.jobs]
            views = [v['name'] for v in parser.views]
        else:
            jobs = options.name
            views = options.name

        if options.del_jobs:
            builder.delete_jobs(jobs)
        elif options.del_views:
            builder.delete_views(views)
        else:
            builder.delete_jobs(jobs)
            builder.delete_views(views)
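A hypothetical options object for the execute() above: with no path,
the names are taken verbatim, and with both del_* flags unset the final
else branch deletes matching jobs and views alike:

    from argparse import Namespace

    options = Namespace(path=None, name=["job-a", "job-b"],
                        del_jobs=False, del_views=False)
    # execute(options, jjb_config) deletes jobs *and* views
    # named "job-a" and "job-b"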
Example #5
    def _generate_xmljobs(self, options, jjb_config=None):
        builder = Builder(jjb_config)

        logger.info("Updating jobs in {0} ({1})".format(
            options.path, options.names))
        orig = time.time()

        # Generate XML
        parser = YamlParser(jjb_config)
        registry = ModuleRegistry(jjb_config, builder.plugins_list)
        xml_generator = XmlJobGenerator(registry)

        parser.load_files(options.path)
        registry.set_parser_data(parser.data)

        job_data_list = parser.expandYaml(registry, options.names)

        xml_jobs = xml_generator.generateXML(job_data_list)

        jobs = parser.jobs
        step = time.time()
        logging.debug('%d XML files generated in %ss',
                      len(jobs), str(step - orig))

        return builder, xml_jobs
Example #6
    def generate_jjb_xml(self):
        """render jjb yaml to xml"""

        jjb_config = self.get_jjb_config()
        options_names = []  # normally a list of jobs globs for targeting
        files_path = glob.glob("./**/", recursive=True)

        parser = YamlParser(jjb_config)
        registry = ModuleRegistry(jjb_config, self.plugins_list)

        xml_job_generator = XmlJobGeneratorWithRaw(registry)
        xml_view_generator = XmlViewGenerator(registry)

        parser.load_files(files_path)
        registry.set_parser_data(parser.data)

        job_data_list, view_data_list = parser.expandYaml(
            registry, options_names)

        def job_data_filter_wrapper(job_data):
            return self._jobs_filter_func(job_data["name"])

        xml_jobs = xml_job_generator.generateXML(
            filter(job_data_filter_wrapper, job_data_list))
        jobs = self.jobs
        for xml_job in xml_jobs:
            formatted_xml_str = self.xml_dump(xml_job.xml)
            jobs[xml_job.name].after_xml = formatted_xml_str

        xml_views = xml_view_generator.generateXML(
            filter(job_data_filter_wrapper, view_data_list))
        views = self.views
        for xml_view in xml_views:
            views[xml_view.name].after_xml = self.xml_dump(xml_view.xml)
Example #7
    def test_yaml_snippet(self):
        config = self._get_config()

        expected_xml = (self._read_utf8_content().strip().replace(
            "<BLANKLINE>", "").replace("\n\n", "\n"))

        parser = YamlParser(config)
        parser.parse(self.in_filename)

        plugins_info = None
        if self.plugins_info_filename:
            plugins_info = self._read_yaml_content(self.plugins_info_filename)
            self.addDetail("plugins-info-filename",
                           text_content(self.plugins_info_filename))
            self.addDetail("plugins-info", text_content(str(plugins_info)))

        registry = ModuleRegistry(config, plugins_info)
        registry.set_parser_data(parser.data)
        job_data_list, view_data_list = parser.expandYaml(registry)

        # Generate the XML tree
        xml_generator = XmlJobGenerator(registry)
        xml_jobs = xml_generator.generateXML(job_data_list)

        xml_jobs.sort(key=AlphanumSort)

        # check reference files are under correct path for folders
        prefix = os.path.dirname(self.in_filename)
        # split using '/' since fullname uses URL path separator
        expected_folders = list(
            set([
                os.path.normpath(
                    os.path.join(prefix,
                                 "/".join(job_data["name"].split("/")[:-1])))
                for job_data in job_data_list
            ]))
        actual_folders = [os.path.dirname(f) for f in self.out_filenames]

        six.assertCountEqual(
            self,
            expected_folders,
            actual_folders,
            "Output file under wrong path, was '%s', should be '%s'" % (
                self.out_filenames[0],
                os.path.join(expected_folders[0],
                             os.path.basename(self.out_filenames[0])),
            ),
        )

        # Prettify generated XML
        pretty_xml = (u"\n".join(job.output().decode("utf-8")
                                 for job in xml_jobs).strip().replace(
                                     "\n\n", "\n"))

        self.assertThat(
            pretty_xml,
            testtools.matchers.DocTestMatches(
                expected_xml, doctest.ELLIPSIS | doctest.REPORT_NDIFF),
        )
Example #8
    def test_yaml_snippet(self):
        config = self._get_config()

        expected_xml = self._read_utf8_content().strip() \
            .replace('<BLANKLINE>', '').replace('\n\n', '\n')

        parser = YamlParser(config)
        parser.parse(self.in_filename)

        plugins_info = None
        if self.plugins_info_filename:
            plugins_info = self._read_yaml_content(self.plugins_info_filename)
            self.addDetail("plugins-info-filename",
                           text_content(self.plugins_info_filename))
            self.addDetail("plugins-info",
                           text_content(str(plugins_info)))

        registry = ModuleRegistry(config, plugins_info)
        registry.set_parser_data(parser.data)
        job_data_list, view_data_list = parser.expandYaml(registry)

        # Generate the XML tree
        xml_generator = XmlJobGenerator(registry)
        xml_jobs = xml_generator.generateXML(job_data_list)

        xml_jobs.sort(key=AlphanumSort)

        # check reference files are under correct path for folders
        prefix = os.path.dirname(self.in_filename)
        # split using '/' since fullname uses URL path separator
        expected_folders = list(set([
            os.path.normpath(
                os.path.join(prefix,
                             '/'.join(job_data['name'].split('/')[:-1])))
            for job_data in job_data_list
        ]))
        actual_folders = [os.path.dirname(f) for f in self.out_filenames]

        six.assertCountEqual(
            self,
            expected_folders, actual_folders,
            "Output file under wrong path, was '%s', should be '%s'" %
            (self.out_filenames[0],
                os.path.join(expected_folders[0],
                             os.path.basename(self.out_filenames[0]))))

        # Prettify generated XML
        pretty_xml = u"\n".join(job.output().decode('utf-8')
                                for job in xml_jobs) \
            .strip().replace('\n\n', '\n')

        self.assertThat(
            pretty_xml,
            testtools.matchers.DocTestMatches(expected_xml,
                                            doctest.ELLIPSIS |
                                            doctest.REPORT_NDIFF))
Example #9
    def import_missing(self) -> list:
        """import missing jobs as xml"""
        missing = [
            item for item in self.jobs.values() if item.changetype() is DELETE
        ]
        if not missing:
            return []

        class FakeRegistry:
            modules = []

        def job_name_to_file_name(j_name):
            _part = re.sub(r"[\/]", "_", j_name)
            return f"./{_part}.xml"

        xml_job_name_pairs = []

        if os.path.exists(self.raw_xml_yaml_path):
            parser = YamlParser(self.get_jjb_config())
            parser.load_files([self.raw_xml_yaml_path])
            job_data_list, _ = parser.expandYaml(FakeRegistry, [])
            for job_data in job_data_list:
                name = job_data["name"]
                fname = job_name_to_file_name(name)
                assert os.path.exists(fname)
                xml_job_name_pairs.append((name, fname))
        template = jinja2.Template(
            """\
---
{% for job_name, file_name in raw_xml_jobs -%}
- job:
   name: {{ job_name |tojson }}
   project-type: raw
   raw: !include-raw: {{ file_name }}

{% endfor -%}
""",
            undefined=jinja2.StrictUndefined,
        )

        for mxml in missing:
            job_name = mxml.name
            file_name = job_name_to_file_name(job_name)
            job_config = mxml.before_xml
            xml_job_name_pairs.append((job_name, file_name))
            assert not os.path.exists(file_name)
            with open(file_name, "w") as fp:
                fp.write(job_config)
            log.info("Imported %s to %s", job_name, file_name)

        with open(self.raw_xml_yaml_path, "w") as fp:
            template.stream(raw_xml_jobs=xml_job_name_pairs).dump(fp)
        return missing
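Rendering the template object defined above with two hypothetical
name/file pairs produces the YAML that YamlParser consumes on the
next run:

    pairs = [("folder/job-a", "./folder_job-a.xml"), ("job-b", "./job-b.xml")]
    print(template.render(raw_xml_jobs=pairs))
    # ---
    # - job:
    #    name: "folder/job-a"
    #    project-type: raw
    #    raw: !include-raw: ./folder_job-a.xml
    #
    # - job:
    #    name: "job-b"
    #    project-type: raw
    #    raw: !include-raw: ./job-b.xml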
Example #10
    def get_jobs(self, wd, name):
        ini_path = "{}/{}.ini".format(wd, name)
        config_path = "{}/config.yaml".format(wd)

        args = ["--conf", ini_path, "test", config_path]
        jjb = self.get_jjb(args)
        builder = JenkinsManager(jjb.jjb_config)
        registry = ModuleRegistry(jjb.jjb_config, builder.plugins_list)
        parser = YamlParser(jjb.jjb_config)
        parser.load_files(jjb.options.path)
        jobs, _ = parser.expandYaml(registry, jjb.options.names)

        return jobs
Example #11
    def test_retain_anchors_enabled(self):
        """
        Verify that anchors are retained across files if retain_anchors is
        enabled in the config.
        """

        files = ["custom_retain_anchors_include001.yaml",
                 "custom_retain_anchors.yaml"]

        jjb_config = JJBConfig()
        jjb_config.yamlparser['retain_anchors'] = True
        jjb_config.validate()
        j = YamlParser(jjb_config)
        j.load_files([os.path.join(self.fixtures_path, f) for f in files])
Example #12
    def load_files(self, fn):
        self.parser = YamlParser(self.global_config, self.plugins_list)

        # handle deprecated behavior, and check that it's not a file-like
        # object, as those may also implement the '__iter__' attribute.
        if not hasattr(fn, '__iter__') or hasattr(fn, 'read'):
            logger.warning(
                'Passing single elements for the `fn` argument in '
                'Builder.load_files is deprecated. Please update your code '
                'to use a list as support for automatic conversion will be '
                'removed in a future version.')
            fn = [fn]

        files_to_process = []
        for path in fn:
            if not hasattr(path, 'read') and os.path.isdir(path):
                files_to_process.extend([os.path.join(path, f)
                                         for f in os.listdir(path)
                                         if (f.endswith('.yml')
                                             or f.endswith('.yaml'))])
            else:
                files_to_process.append(path)

        # symlinks used to allow loading of sub-dirs can result in duplicate
        # definitions of macros and templates when loading all from top-level
        unique_files = []
        for f in files_to_process:
            if hasattr(f, 'read'):
                unique_files.append(f)
                continue
            rpf = os.path.realpath(f)
            if rpf not in unique_files:
                unique_files.append(rpf)
            else:
                logger.warning("File '%s' already added as '%s', ignoring "
                               "reference to avoid duplicating yaml "
                               "definitions." % (f, rpf))

        for in_file in unique_files:
            # ask-for-permission checks are used instead of
            # ask-for-forgiveness; they perform better in low-volume cases.
            if hasattr(in_file, 'name'):
                fname = in_file.name
            else:
                fname = in_file
            logger.debug("Parsing YAML file {0}".format(fname))
            if hasattr(in_file, 'read'):
                self.parser.parse_fp(in_file)
            else:
                self.parser.parse(in_file)
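All three call styles reach the same parser; a hedged sketch, assuming
builder is an already-constructed Builder instance:

    import io

    builder.load_files(["jobs/"])             # list of dirs/files (preferred)
    builder.load_files("jobs/defaults.yaml")  # single element: works, but warns
    builder.load_files([io.StringIO("- job:\n    name: sketch-job\n")])  # file-like, via parse_fp()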
Example #13
    def test_yaml_snippet(self):
        expected_xml = self._read_utf8_content()

        if self.conf_filename:
            config = configparser.ConfigParser()
            config.readfp(open(self.conf_filename))
        else:
            config = None
        parser = YamlParser(config)
        parser.parse(self.in_filename)

        # Generate the XML tree
        parser.expandYaml()
        parser.generateXML()

        parser.xml_jobs.sort(key=operator.attrgetter('name'))

        # Prettify generated XML
        pretty_xml = u"\n".join(job.output().decode('utf-8')
                                for job in parser.xml_jobs)

        self.assertThat(
            pretty_xml,
            testtools.matchers.DocTestMatches(expected_xml,
                                              doctest.ELLIPSIS |
                                              doctest.NORMALIZE_WHITESPACE |
                                              doctest.REPORT_NDIFF)
        )
Example #14
    def execute(self, options, jjb_config):
        builder = JenkinsManager(jjb_config)

        fn = options.path
        registry = ModuleRegistry(jjb_config, builder.plugins_list)
        parser = YamlParser(jjb_config)

        if fn:
            parser.load_files(fn)
            parser.expandYaml(registry, options.name)
            jobs = [j["name"] for j in parser.jobs]
        else:
            jobs = options.name

        builder.delete_jobs(jobs)
Example #15
    def test_retain_anchors_default(self):
        """
        Verify that anchors are NOT retained across files by default.
        """

        files = ["custom_retain_anchors_include001.yaml",
                 "custom_retain_anchors.yaml"]

        jjb_config = JJBConfig()
        # use the default value for retain_anchors
        jjb_config.validate()
        j = YamlParser(jjb_config)
        with ExpectedException(yaml.composer.ComposerError,
                               "found undefined alias.*"):
            j.load_files([os.path.join(self.fixtures_path, f) for f in files])
Example #16
    def test_yaml_snippet(self):
        if not self.in_filename:
            return

        jjb_config = self._get_config()

        expected_xml = self._read_utf8_content()
        yaml_content = self._read_yaml_content(self.in_filename)

        plugins_info = None
        if self.plugins_info_filename is not None:
            plugins_info = self._read_yaml_content(self.plugins_info_filename)
            self.addDetail("plugins-info-filename",
                           text_content(self.plugins_info_filename))
            self.addDetail("plugins-info", text_content(str(plugins_info)))

        parser = YamlParser(jjb_config)
        registry = ModuleRegistry(jjb_config, plugins_info)
        registry.set_parser_data(parser.data)

        pub = self.klass(registry)

        project = None
        if ('project-type' in yaml_content):
            if (yaml_content['project-type'] == "maven"):
                project = project_maven.Maven(registry)
            elif (yaml_content['project-type'] == "matrix"):
                project = project_matrix.Matrix(registry)
            elif (yaml_content['project-type'] == "flow"):
                project = project_flow.Flow(registry)
            elif (yaml_content['project-type'] == "multijob"):
                project = project_multijob.MultiJob(registry)
            elif (yaml_content['project-type'] == "externaljob"):
                project = project_externaljob.ExternalJob(registry)

        if 'view-type' in yaml_content:
            if yaml_content['view-type'] == "list":
                project = view_list.List(None)
            elif yaml_content['view-type'] == "pipeline":
                project = view_pipeline.Pipeline(None)
            else:
                raise InvalidAttributeError('view-type',
                                            yaml_content['view-type'])

        if project:
            xml_project = project.root_xml(yaml_content)
        else:
            xml_project = XML.Element('project')

        # Generate the XML tree directly with modules/general
        pub.gen_xml(xml_project, yaml_content)

        # Prettify generated XML
        pretty_xml = XmlJob(xml_project, 'fixturejob').output().decode('utf-8')

        self.assertThat(
            pretty_xml,
            testtools.matchers.DocTestMatches(
                expected_xml, doctest.ELLIPSIS | doctest.REPORT_NDIFF))
Example #17
    def execute(self, options, jjb_config):
        builder = Builder(jjb_config)

        fn = options.path

        registry = ModuleRegistry(jjb_config, builder.plugins_list)
        for jobs_glob in options.name:
            parser = YamlParser(jjb_config)

            if fn:
                parser.load_files(fn)
                parser.expandYaml(registry, [jobs_glob])
                jobs = [j['name'] for j in parser.jobs]
            else:
                jobs = [jobs_glob]

            builder.delete_jobs(jobs)
Example #18
    def test_yaml_snippet(self):
        config = self._get_config()

        expected_xml = self._read_utf8_content()

        parser = YamlParser(config)
        parser.parse(self.in_filename)

        registry = ModuleRegistry(config)
        registry.set_parser_data(parser.data)
        job_data_list, view_data_list = parser.expandYaml(registry)

        # Generate the XML tree
        xml_generator = XmlJobGenerator(registry)
        xml_jobs = xml_generator.generateXML(job_data_list)

        xml_jobs.sort(key=AlphanumSort)

        # check reference files are under correct path for folders
        prefix = os.path.dirname(self.in_filename)
        # split using '/' since fullname uses URL path separator
        expected_folders = list(
            set([
                os.path.normpath(
                    os.path.join(prefix,
                                 '/'.join(job_data['name'].split('/')[:-1])))
                for job_data in job_data_list
            ]))
        actual_folders = [os.path.dirname(f) for f in self.out_filenames]

        six.assertCountEqual(
            self, expected_folders, actual_folders,
            "Output file under wrong path, was '%s', should be '%s'" %
            (self.out_filenames[0],
             os.path.join(expected_folders[0],
                          os.path.basename(self.out_filenames[0]))))

        # Prettify generated XML
        pretty_xml = u"\n".join(job.output().decode('utf-8')
                                for job in xml_jobs)

        self.assertThat(
            pretty_xml,
            testtools.matchers.DocTestMatches(
                expected_xml, doctest.ELLIPSIS | doctest.REPORT_NDIFF))
Example #19
    def test_yaml_snippet(self):
        expected_xml = self._read_utf8_content()

        if self.conf_filename:
            config = configparser.ConfigParser()
            config.readfp(open(self.conf_filename))
        else:
            config = None
        parser = YamlParser(config)
        parser.parse(self.in_filename)

        # Generate the XML tree
        parser.expandYaml()
        parser.generateXML()

        parser.xml_jobs.sort(key=operator.attrgetter('name'))

        # Prettify generated XML
        pretty_xml = u"\n".join(job.output().decode('utf-8')
                                for job in parser.xml_jobs)

        self.assertThat(
            pretty_xml,
            testtools.matchers.DocTestMatches(expected_xml,
                                              doctest.ELLIPSIS |
                                              doctest.NORMALIZE_WHITESPACE |
                                              doctest.REPORT_NDIFF)
        )
Example #20
    def test_yaml_snippet(self):
        if not self.in_filename:
            return

        if self.conf_filename is not None:
            config = configparser.ConfigParser()
            config.readfp(open(self.conf_filename))
        else:
            config = {}

        expected_xml = self._read_utf8_content()
        yaml_content = self._read_yaml_content(self.in_filename)
        if isinstance(yaml_content, list):
            yaml_content = yaml_content[0]
        project = None
        if ('project-type' in yaml_content):
            if (yaml_content['project-type'] == "maven"):
                project = project_maven.Maven(None)
            elif (yaml_content['project-type'] == "matrix"):
                project = project_matrix.Matrix(None)
            elif (yaml_content['project-type'] == "flow"):
                project = project_flow.Flow(None)
            elif (yaml_content['project-type'] == "multijob"):
                project = project_multijob.MultiJob(None)
            elif (yaml_content['project-type'] == "folder"):
                project = folders.Folder(None)

        if project:
            xml_project = project.root_xml(yaml_content)
        else:
            xml_project = XML.Element('project')
        print(yaml_content)
        plugins_info = None
        if self.plugins_info_filename is not None:
            plugins_info = self._read_yaml_content(self.plugins_info_filename)
            self.addDetail("plugins-info-filename",
                           text_content(self.plugins_info_filename))
            self.addDetail("plugins-info", text_content(str(plugins_info)))

        parser = YamlParser(config, plugins_info)

        pub = self.klass(parser.registry)

        # Generate the XML tree directly with modules/general
        pub.gen_xml(parser, xml_project, yaml_content)

        # Prettify generated XML
        pretty_xml = XmlJob(xml_project, 'fixturejob').output().decode('utf-8')

        self.assertThat(
            pretty_xml,
            testtools.matchers.DocTestMatches(
                expected_xml, doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE
                | doctest.REPORT_NDIFF))
Example #21
    def test_multiple_same_anchor_in_multiple_toplevel_yaml(self):
        """
        Verify that anchors/aliases only span use of '!include' tag

        To ensure that any yaml loaded by the include tag is in the same
        space as the top level file, but individual top level yaml definitions
        are treated by the yaml loader as independent.
        """

        files = ["custom_same_anchor-001-part1.yaml",
                 "custom_same_anchor-001-part2.yaml"]

        jjb_config = JJBConfig()
        jjb_config.jenkins['url'] = 'http://example.com'
        jjb_config.jenkins['user'] = '******'
        jjb_config.jenkins['password'] = '******'
        jjb_config.builder['plugins_info'] = []
        jjb_config.validate()
        j = YamlParser(jjb_config)
        j.load_files([os.path.join(self.fixtures_path, f) for f in files])
Example #22
    def test_retain_anchors_enabled_j2_yaml(self):
        """
        Verify that anchors are retained across files and are properly retained when using !j2-yaml.
        """

        files = [
            "custom_retain_anchors_j2_yaml_include001.yaml",
            "custom_retain_anchors_j2_yaml.yaml",
        ]

        jjb_config = JJBConfig()
        jjb_config.yamlparser["retain_anchors"] = True
        jjb_config.validate()
        j = YamlParser(jjb_config)
        j.load_files([os.path.join(self.fixtures_path, f) for f in files])

        registry = ModuleRegistry(jjb_config, None)
        jobs, _ = j.expandYaml(registry)
        self.assertEqual(jobs[0]["builders"][0]["shell"],
                         "docker run ubuntu:latest")
Example #23
def assert_case(case_name):
    case_source, case_result = (os.path.join(BASE_PATH, case_name + ext)
                                for ext in ['.yml', '.xml'])
    jjb_config = JJBConfig()
    builder = Builder(jjb_config)

    # Generate XML
    parser = YamlParser(jjb_config)
    registry = ModuleRegistry(jjb_config, builder.plugins_list)
    xml_generator = XmlJobGenerator(registry)

    parser.load_files(case_source)
    registry.set_parser_data(parser.data)

    job_data_list = parser.expandYaml(registry, [])

    xml_jobs = xml_generator.generateXML(job_data_list)

    result_xml = ET.XML(xml_jobs[0].output())
    expected_xml = ET.XML(open(case_result).read())
    assert ET.tostring(result_xml) == ET.tostring(expected_xml)
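A hypothetical invocation: with BASE_PATH pointing at a fixture
directory, this compares simple-job.yml against simple-job.xml:

    assert_case("simple-job")  # hypothetical fixture name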
Example #24
    def import_missing(self) -> list:
        """import missing jobs as xml"""
        missing = [
            item for item in self.jobs.values() if item.changetype() is DELETE
        ]
        if not missing:
            return []

        class FakeRegistry:
            modules = []

        def job_name_to_file_name(j_name):
            _part = re.sub(r"[\/]", "_", j_name)
            return f"./{_part}.xml"

        xml_job_name_pairs = []

        if os.path.exists(self.raw_xml_yaml_path):
            parser = YamlParser(self.get_jjb_config())
            parser.load_files([self.raw_xml_yaml_path])
            job_data_list, _ = parser.expandYaml(FakeRegistry, [])
            for job_data in job_data_list:
                name = job_data["name"]
                fname = job_name_to_file_name(name)
                assert os.path.exists(fname)
                xml_job_name_pairs.append((name, fname))
        template = self.jenv.get_template("raw_xml_import.j2")

        for mxml in missing:
            job_name = mxml.name
            file_name = job_name_to_file_name(job_name)
            job_config = mxml.before_xml
            xml_job_name_pairs.append((job_name, file_name))
            assert not os.path.exists(file_name)
            with open(file_name, "w") as fp:
                fp.write(job_config)
            log.info("Imported %s to %s", job_name, file_name)

        with open(self.raw_xml_yaml_path, "w") as fp:
            template.stream(raw_xml_jobs=xml_job_name_pairs).dump(fp)
        return missing
Example #25
def assert_case(case_name):
    case_source, case_result = (os.path.join(BASE_PATH, case_name + ext)
                                for ext in ['.yml', '.xml'])
    jjb_config = JJBConfig()
    builder = Builder(jjb_config)

    # Generate XML
    parser = YamlParser(jjb_config)
    registry = ModuleRegistry(jjb_config, builder.plugins_list)
    xml_generator = XmlJobGenerator(registry)

    parser.load_files(case_source)
    registry.set_parser_data(parser.data)

    job_data_list = parser.expandYaml(registry, [])

    xml_jobs = xml_generator.generateXML(job_data_list)

    result_xml = ET.XML(xml_jobs[0].output())
    expected_xml = ET.XML(open(case_result).read())
    assert ET.tostring(result_xml) == ET.tostring(expected_xml)
Example #26
    def execute(self, options, jjb_config):
        builder = JenkinsManager(jjb_config)

        if options.del_jobs and options.del_views:
            raise JenkinsJobsException(
                '"--views-only" and "--jobs-only" cannot be used together.')

        fn = options.path
        registry = ModuleRegistry(jjb_config, builder.plugins_list)
        parser = YamlParser(jjb_config)

        if fn:
            parser.load_files(fn)
            parser.expandYaml(registry, options.name)
            jobs = [j["name"] for j in parser.jobs]
            views = [v["name"] for v in parser.views]
        else:
            jobs = options.name
            views = options.name

        if options.del_jobs:
            builder.delete_jobs(jobs)
        elif options.del_views:
            builder.delete_views(views)
        else:
            builder.delete_jobs(jobs)
            builder.delete_views(views)
Example #27
    def test_yaml_snippet(self):
        if not self.in_filename:
            return

        config = self._get_config()

        expected_xml = self._read_utf8_content()
        yaml_content = self._read_yaml_content(self.in_filename)

        plugins_info = None
        if self.plugins_info_filename is not None:
            plugins_info = self._read_yaml_content(self.plugins_info_filename)
            self.addDetail("plugins-info-filename",
                           text_content(self.plugins_info_filename))
            self.addDetail("plugins-info",
                           text_content(str(plugins_info)))

        parser = YamlParser(config, plugins_info)

        pub = self.klass(parser.registry)

        project = None
        if ('project-type' in yaml_content):
            if (yaml_content['project-type'] == "maven"):
                project = project_maven.Maven(parser.registry)
            elif (yaml_content['project-type'] == "matrix"):
                project = project_matrix.Matrix(parser.registry)
            elif (yaml_content['project-type'] == "flow"):
                project = project_flow.Flow(parser.registry)
            elif (yaml_content['project-type'] == "multijob"):
                project = project_multijob.MultiJob(parser.registry)
            elif (yaml_content['project-type'] == "externaljob"):
                project = project_externaljob.ExternalJob(parser.registry)

        if project:
            xml_project = project.root_xml(yaml_content)
        else:
            xml_project = XML.Element('project')

        # Generate the XML tree directly with modules/general
        pub.gen_xml(parser, xml_project, yaml_content)

        # Prettify generated XML
        pretty_xml = XmlJob(xml_project, 'fixturejob').output().decode('utf-8')

        self.assertThat(
            pretty_xml,
            testtools.matchers.DocTestMatches(expected_xml,
                                              doctest.ELLIPSIS |
                                              doctest.REPORT_NDIFF)
        )
Example #28
    def get_job_webhooks_data(self):
        job_webhooks_data = {}
        for name, wd in self.working_dirs.items():
            ini_path = '{}/{}.ini'.format(wd, name)
            config_path = '{}/config.yaml'.format(wd)

            args = ['--conf', ini_path, 'test', config_path]
            jjb = self.get_jjb(args)
            builder = JenkinsManager(jjb.jjb_config)
            registry = ModuleRegistry(jjb.jjb_config, builder.plugins_list)
            parser = YamlParser(jjb.jjb_config)
            parser.load_files(jjb.options.path)

            jobs, _ = parser.expandYaml(registry, jjb.options.names)

            for job in jobs:
                try:
                    project_url_raw = job['properties'][0]['github']['url']
                    if 'https://github.com' in project_url_raw:
                        continue
                    job_url = \
                        '{}/project/{}'.format(self.instances[name],
                                               job['name'])
                    project_url = \
                        project_url_raw.strip('/').replace('.git', '')
                    gitlab_triggers = job['triggers'][0]['gitlab']
                    mr_trigger = gitlab_triggers['trigger-merge-request']
                    trigger = 'mr' if mr_trigger else 'push'
                    hook = {
                        'job_url': job_url,
                        'trigger': trigger,
                    }
                    job_webhooks_data.setdefault(project_url, [])
                    job_webhooks_data[project_url].append(hook)
                except KeyError:
                    continue
        return job_webhooks_data
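For reference, a hypothetical expanded job dict carrying just the keys
the loop above reads (a GitLab-hosted project with a merge-request
trigger):

    job = {
        "name": "org-repo-pr",
        "properties": [{"github": {"url": "https://gitlab.example.com/org/repo/"}}],
        "triggers": [{"gitlab": {"trigger-merge-request": True}}],
    }
    # -> trigger == "mr", project_url == "https://gitlab.example.com/org/repo"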
Example #29
    def execute(self, options, jjb_config):
        builder = Builder(jjb_config)

        parser = YamlParser(jjb_config, builder.plugins_list)

        fn = options.path

        for jobs_glob in options.name:
            parser = YamlParser(jjb_config, builder.plugins_list)

            if fn:
                parser.load_files(fn)
                parser.expandYaml([jobs_glob])
                jobs = [j['name'] for j in parser.jobs]
            else:
                jobs = [jobs_glob]

            builder.delete_job(jobs)
Example #30
    def delete_job(self, jobs_glob, fn=None):
        self.parser = YamlParser(self.jjb_config, self.plugins_list)

        if fn:
            self.parser.load_files(fn)
            self.parser.expandYaml([jobs_glob])
            jobs = [j['name'] for j in self.parser.jobs]
        else:
            jobs = [jobs_glob]

        if jobs is not None:
            logger.info("Removing jenkins job(s): %s" % ", ".join(jobs))
        for job in jobs:
            self.jenkins.delete_job(job)
            if self.cache.is_cached(job):
                self.cache.set(job, '')
        self.cache.save()
Example #31
    def _generate_xmljobs(self, options, jjb_config=None):
        builder = Builder(jjb_config)

        logger.info("Updating jobs in {0} ({1})".format(options.path, options.names))
        orig = time.time()

        # Generate XML
        parser = YamlParser(jjb_config, builder.plugins_list)
        parser.load_files(options.path)
        parser.expandYaml(options.names)
        parser.generateXML()

        jobs = parser.jobs
        step = time.time()
        logging.debug("%d XML files generated in %ss", len(jobs), str(step - orig))

        return builder, parser.xml_jobs
Example #32
    def execute(self, options, jjb_config):
        builder = JenkinsManager(jjb_config)

        fn = options.path
        registry = ModuleRegistry(jjb_config, builder.plugins_list)
        parser = YamlParser(jjb_config)

        if fn:
            parser.load_files(fn)
            parser.expandYaml(registry, options.name)
            jobs = [j['name'] for j in parser.jobs]
        else:
            jobs = options.name

        builder.delete_jobs(jobs)
Example #33
class Builder(object):
    def __init__(self, jenkins_url, jenkins_user, jenkins_password,
                 config=None, jenkins_timeout=_DEFAULT_TIMEOUT,
                 ignore_cache=False, flush_cache=False, plugins_list=None):
        self.jenkins = Jenkins(jenkins_url, jenkins_user, jenkins_password,
                               jenkins_timeout)
        self.cache = CacheStorage(jenkins_url, flush=flush_cache)
        self.global_config = config
        self.ignore_cache = ignore_cache
        self._plugins_list = plugins_list

    @property
    def plugins_list(self):
        if self._plugins_list is None:
            self._plugins_list = self.jenkins.get_plugins_info()
        return self._plugins_list

    def load_files(self, fn):
        self.parser = YamlParser(self.global_config, self.plugins_list)

        # handle deprecated behavior, and check that it's not a file-like
        # object, as those may also implement the '__iter__' attribute.
        if not hasattr(fn, '__iter__') or hasattr(fn, 'read'):
            logger.warning(
                'Passing single elements for the `fn` argument in '
                'Builder.load_files is deprecated. Please update your code '
                'to use a list as support for automatic conversion will be '
                'removed in a future version.')
            fn = [fn]

        files_to_process = []
        for path in fn:
            if not hasattr(path, 'read') and os.path.isdir(path):
                files_to_process.extend([os.path.join(path, f)
                                         for f in os.listdir(path)
                                         if (f.endswith('.yml')
                                             or f.endswith('.yaml'))])
            else:
                files_to_process.append(path)

        # symlinks used to allow loading of sub-dirs can result in duplicate
        # definitions of macros and templates when loading all from top-level
        unique_files = []
        for f in files_to_process:
            if hasattr(f, 'read'):
                unique_files.append(f)
                continue
            rpf = os.path.realpath(f)
            if rpf not in unique_files:
                unique_files.append(rpf)
            else:
                logger.warning("File '%s' already added as '%s', ignoring "
                               "reference to avoid duplicating yaml "
                               "definitions." % (f, rpf))

        for in_file in unique_files:
            # ask-for-permission checks are used instead of
            # ask-for-forgiveness; they perform better in low-volume cases.
            if hasattr(in_file, 'name'):
                fname = in_file.name
            else:
                fname = in_file
            logger.debug("Parsing YAML file {0}".format(fname))
            if hasattr(in_file, 'read'):
                self.parser.parse_fp(in_file)
            else:
                self.parser.parse(in_file)

    def delete_old_managed(self, keep=None):
        jobs = self.jenkins.get_jobs()
        deleted_jobs = 0
        if keep is None:
            keep = [job.name for job in self.parser.xml_jobs]
        for job in jobs:
            if job['name'] not in keep:
                if self.jenkins.is_managed(job['name']):
                    logger.info("Removing obsolete jenkins job {0}"
                                .format(job['name']))
                    self.delete_job(job['name'])
                    deleted_jobs += 1
                else:
                    logger.info("Not deleting unmanaged jenkins job %s",
                                job['name'])
            else:
                logger.debug("Keeping job %s", job['name'])
        return deleted_jobs

    def delete_job(self, jobs_glob, fn=None):
        if fn:
            self.load_files(fn)
            self.parser.expandYaml([jobs_glob])
            jobs = [j['name'] for j in self.parser.jobs]
        else:
            jobs = [jobs_glob]

        if jobs is not None:
            logger.info("Removing jenkins job(s): %s" % ", ".join(jobs))
        for job in jobs:
            self.jenkins.delete_job(job)
            if self.cache.is_cached(job):
                self.cache.set(job, '')
        self.cache.save()

    def delete_all_jobs(self):
        jobs = self.jenkins.get_jobs()
        logger.info("Number of jobs to delete:  %d", len(jobs))
        self.jenkins.delete_all_jobs()
        # Need to clear the JJB cache after deletion
        self.cache.clear()

    @parallelize
    def changed(self, job):
        md5 = job.md5()
        changed = self.ignore_cache or self.cache.has_changed(job.name, md5)
        if not changed:
            logger.debug("'{0}' has not changed".format(job.name))
        return changed

    def update_jobs(self, input_fn, jobs_glob=None, output=None,
                    n_workers=None):
        orig = time.time()
        self.load_files(input_fn)
        self.parser.expandYaml(jobs_glob)
        self.parser.generateXML()
        step = time.time()
        logging.debug('%d XML files generated in %ss',
                      len(self.parser.jobs), str(step - orig))

        logger.info("Number of jobs generated:  %d", len(self.parser.xml_jobs))
        self.parser.xml_jobs.sort(key=operator.attrgetter('name'))

        if (output and not hasattr(output, 'write')
                and not os.path.isdir(output)):
            logger.info("Creating directory %s" % output)
            try:
                os.makedirs(output)
            except OSError:
                if not os.path.isdir(output):
                    raise

        if output:
            for job in self.parser.xml_jobs:
                if hasattr(output, 'write'):
                    # `output` is a file-like object
                    logger.info("Job name:  %s", job.name)
                    logger.debug("Writing XML to '{0}'".format(output))
                    output = utils.wrap_stream(output)
                    try:
                        output.write(job.output())
                    except IOError as exc:
                        if exc.errno == errno.EPIPE:
                            # EPIPE could happen if piping output to something
                            # that doesn't read the whole input (e.g.: the UNIX
                            # `head` command)
                            return
                        raise
                    continue

                output_fn = os.path.join(output, job.name)
                logger.debug("Writing XML to '{0}'".format(output_fn))
                with io.open(output_fn, 'w', encoding='utf-8') as f:
                    f.write(job.output().decode('utf-8'))
            return self.parser.xml_jobs, len(self.parser.xml_jobs)

        # Filter out the jobs that did not change
        logging.debug('Filtering %d jobs for changed jobs',
                      len(self.parser.xml_jobs))
        step = time.time()
        jobs = [job for job in self.parser.xml_jobs
                if self.changed(job)]
        logging.debug("Filtered for changed jobs in %ss",
                      (time.time() - step))

        if not jobs:
            return [], 0

        # Update the jobs
        logging.debug('Updating jobs')
        step = time.time()
        p_params = [{'job': job} for job in jobs]
        results = self.parallel_update_job(
            n_workers=n_workers,
            parallelize=p_params)
        logging.debug("Parsing results")
        # generalize the result parsing, as a parallelized job always returns a
        # list
        if len(p_params) in (1, 0):
            results = [results]
        for result in results:
            if isinstance(result, Exception):
                raise result
            else:
                # update in-memory cache
                j_name, j_md5 = result
                self.cache.set(j_name, j_md5)
        # write cache to disk
        self.cache.save()
        logging.debug("Updated %d jobs in %ss",
                      len(jobs),
                      time.time() - step)
        logging.debug("Total run took %ss", (time.time() - orig))
        return jobs, len(jobs)

    @parallelize
    def parallel_update_job(self, job):
        self.jenkins.update_job(job.name, job.output().decode('utf-8'))
        return (job.name, job.md5())

    def update_job(self, input_fn, jobs_glob=None, output=None):
        logging.warn('Current update_job function signature is deprecated and '
                     'will change in future versions to the signature of the '
                     'new parallel_update_job')
        return self.update_jobs(input_fn, jobs_glob, output)
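A hedged usage sketch for this legacy Builder; the URL and credentials
are placeholders, and writing to an output directory keeps the run
local instead of updating a live Jenkins:

    builder = Builder("http://localhost:8080", "admin", "api-token",
                      config=None, plugins_list=[])
    xml_jobs, count = builder.update_jobs(["jobs/"], output="out/")
    print("wrote %d job XML files" % count)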
Example #34
    def test_yaml_snippet(self):
        if not self.in_filename:
            return

        jjb_config = self._get_config()

        expected_xml = self._read_utf8_content()
        yaml_content = self._read_yaml_content(self.in_filename)

        plugins_info = None
        if self.plugins_info_filename:
            plugins_info = self._read_yaml_content(self.plugins_info_filename)
            self.addDetail("plugins-info-filename",
                           text_content(self.plugins_info_filename))
            self.addDetail("plugins-info", text_content(str(plugins_info)))

        parser = YamlParser(jjb_config)
        registry = ModuleRegistry(jjb_config, plugins_info)
        registry.set_parser_data(parser.data)

        pub = self.klass(registry)

        project = None
        if ('project-type' in yaml_content):
            if (yaml_content['project-type'] == "maven"):
                project = project_maven.Maven(registry)
            elif (yaml_content['project-type'] == "matrix"):
                project = project_matrix.Matrix(registry)
            elif (yaml_content['project-type'] == "flow"):
                project = project_flow.Flow(registry)
            elif (yaml_content['project-type'] == "multijob"):
                project = project_multijob.MultiJob(registry)
            elif (yaml_content['project-type'] == "multibranch"):
                project = project_multibranch.WorkflowMultiBranch(registry)
            elif (yaml_content['project-type'] == "multibranch-defaults"):
                project = project_multibranch.WorkflowMultiBranchDefaults(
                    registry)  # noqa
            elif (yaml_content['project-type'] == "externaljob"):
                project = project_externaljob.ExternalJob(registry)

        if 'view-type' in yaml_content:
            if yaml_content['view-type'] == "list":
                project = view_list.List(None)
            elif yaml_content['view-type'] == "pipeline":
                project = view_pipeline.Pipeline(None)
            else:
                raise InvalidAttributeError('view-type',
                                            yaml_content['view-type'])

        if project:
            xml_project = project.root_xml(yaml_content)
        else:
            xml_project = XML.Element('project')

        # Generate the XML tree directly with modules/general
        pub.gen_xml(xml_project, yaml_content)

        # check output file is under correct path
        if 'name' in yaml_content:
            prefix = os.path.dirname(self.in_filename)
            # split using '/' since fullname uses URL path separator
            expected_folders = [
                os.path.normpath(
                    os.path.join(
                        prefix, '/'.join(
                            parser._getfullname(yaml_content).split('/')
                            [:-1])))
            ]
            actual_folders = [os.path.dirname(f) for f in self.out_filenames]

            self.assertEquals(
                expected_folders, actual_folders,
                "Output file under wrong path, was '%s', should be '%s'" %
                (self.out_filenames[0],
                 os.path.join(expected_folders[0],
                              os.path.basename(self.out_filenames[0]))))

        # Prettify generated XML
        pretty_xml = XmlJob(xml_project, 'fixturejob').output().decode('utf-8')

        self.assertThat(
            pretty_xml,
            testtools.matchers.DocTestMatches(
                expected_xml, doctest.ELLIPSIS | doctest.REPORT_NDIFF))
Example #35
class Builder(object):
    def __init__(self, jenkins_url, jenkins_user, jenkins_password,
                 config=None, ignore_cache=False, flush_cache=False,
                 plugins_list=None):
        self.jenkins = Jenkins(jenkins_url, jenkins_user, jenkins_password)
        self.cache = CacheStorage(jenkins_url, flush=flush_cache)
        self.global_config = config
        self.ignore_cache = ignore_cache
        self._plugins_list = plugins_list

    @property
    def plugins_list(self):
        if self._plugins_list is None:
            self._plugins_list = self.jenkins.get_plugins_info()
        return self._plugins_list

    def load_files(self, fn):
        self.parser = YamlParser(self.global_config, self.plugins_list)

        # handle deprecated behavior
        if not hasattr(fn, '__iter__'):
            logger.warning(
                'Passing single elements for the `fn` argument in '
                'Builder.load_files is deprecated. Please update your code '
                'to use a list as support for automatic conversion will be '
                'removed in a future version.')
            fn = [fn]

        files_to_process = []
        for path in fn:
            if os.path.isdir(path):
                files_to_process.extend([os.path.join(path, f)
                                         for f in os.listdir(path)
                                         if (f.endswith('.yml')
                                             or f.endswith('.yaml'))])
            else:
                files_to_process.append(path)

        # symlinks used to allow loading of sub-dirs can result in duplicate
        # definitions of macros and templates when loading all from top-level
        unique_files = []
        for f in files_to_process:
            rpf = os.path.realpath(f)
            if rpf not in unique_files:
                unique_files.append(rpf)
            else:
                logger.warning("File '%s' already added as '%s', ignoring "
                               "reference to avoid duplicating yaml "
                               "definitions." % (f, rpf))

        for in_file in unique_files:
            # ask-for-permission checks are used instead of
            # ask-for-forgiveness; they perform better in low-volume cases.
            if hasattr(in_file, 'name'):
                fname = in_file.name
            else:
                fname = in_file
            logger.debug("Parsing YAML file {0}".format(fname))
            if hasattr(in_file, 'read'):
                self.parser.parse_fp(in_file)
            else:
                self.parser.parse(in_file)

    def delete_old_managed(self, keep=None):
        jobs = self.jenkins.get_jobs()
        deleted_jobs = 0
        if keep is None:
            keep = [job.name for job in self.parser.xml_jobs]
        for job in jobs:
            if job['name'] not in keep and \
                    self.jenkins.is_managed(job['name']):
                logger.info("Removing obsolete jenkins job {0}"
                            .format(job['name']))
                self.delete_job(job['name'])
                deleted_jobs += 1
            else:
                logger.debug("Ignoring unmanaged jenkins job %s",
                             job['name'])
        return deleted_jobs

    def delete_job(self, jobs_glob, fn=None):
        if fn:
            self.load_files(fn)
            self.parser.expandYaml([jobs_glob])
            jobs = [j['name'] for j in self.parser.jobs]
        else:
            jobs = [jobs_glob]

        if jobs is not None:
            logger.info("Removing jenkins job(s): %s" % ", ".join(jobs))
        for job in jobs:
            self.jenkins.delete_job(job)
            if self.cache.is_cached(job):
                self.cache.set(job, '')

    def delete_all_jobs(self):
        jobs = self.jenkins.get_jobs()
        logger.info("Number of jobs to delete:  %d", len(jobs))
        for job in jobs:
            self.delete_job(job['name'])

    def update_job(self, input_fn, jobs_glob=None, output=None):
        self.load_files(input_fn)
        self.parser.expandYaml(jobs_glob)
        self.parser.generateXML()

        logger.info("Number of jobs generated:  %d", len(self.parser.xml_jobs))
        self.parser.xml_jobs.sort(key=operator.attrgetter('name'))

        if (output and not hasattr(output, 'write')
                and not os.path.isdir(output)):
            logger.info("Creating directory %s" % output)
            try:
                os.makedirs(output)
            except OSError:
                if not os.path.isdir(output):
                    raise

        updated_jobs = 0
        for job in self.parser.xml_jobs:
            if output:
                if hasattr(output, 'write'):
                    # `output` is a file-like object
                    logger.info("Job name:  %s", job.name)
                    logger.debug("Writing XML to '{0}'".format(output))
                    try:
                        output.write(job.output())
                    except IOError as exc:
                        if exc.errno == errno.EPIPE:
                            # EPIPE could happen if piping output to something
                            # that doesn't read the whole input (e.g.: the UNIX
                            # `head` command)
                            return
                        raise
                    continue

                output_fn = os.path.join(output, job.name)
                logger.debug("Writing XML to '{0}'".format(output_fn))
                with io.open(output_fn, 'w', encoding='utf-8') as f:
                    f.write(job.output().decode('utf-8'))
                continue
            md5 = job.md5()
            if (self.jenkins.is_job(job.name)
                    and not self.cache.is_cached(job.name)):
                old_md5 = self.jenkins.get_job_md5(job.name)
                self.cache.set(job.name, old_md5)

            if self.cache.has_changed(job.name, md5) or self.ignore_cache:
                self.jenkins.update_job(job.name, job.output())
                updated_jobs += 1
                self.cache.set(job.name, md5)
            else:
                logger.debug("'{0}' has not changed".format(job.name))
        return self.parser.xml_jobs, updated_jobs
Example #36
class Builder(object):
    def __init__(self, jjb_config):
        self.jenkins = Jenkins(jjb_config.jenkins['url'],
                               jjb_config.jenkins['user'],
                               jjb_config.jenkins['password'],
                               jjb_config.jenkins['timeout'])
        self.cache = CacheStorage(jjb_config.jenkins['url'],
                                  flush=jjb_config.builder['flush_cache'])
        self._plugins_list = jjb_config.builder['plugins_info']

        self.jjb_config = jjb_config

    @property
    def plugins_list(self):
        if self._plugins_list is None:
            self._plugins_list = self.jenkins.get_plugins_info()
        return self._plugins_list

    def delete_old_managed(self, keep=None):
        jobs = self.jenkins.get_jobs()
        deleted_jobs = 0
        if keep is None:
            keep = [job.name for job in self.parser.xml_jobs]
        for job in jobs:
            if job['name'] not in keep:
                if self.jenkins.is_managed(job['name']):
                    logger.info("Removing obsolete jenkins job {0}"
                                .format(job['name']))
                    self.delete_job(job['name'])
                    deleted_jobs += 1
                else:
                    logger.info("Not deleting unmanaged jenkins job %s",
                                job['name'])
            else:
                logger.debug("Keeping job %s", job['name'])
        return deleted_jobs

    def delete_job(self, jobs_glob, fn=None):
        self.parser = YamlParser(self.jjb_config, self.plugins_list)

        if fn:
            self.parser.load_files(fn)
            self.parser.expandYaml([jobs_glob])
            jobs = [j['name'] for j in self.parser.jobs]
        else:
            jobs = [jobs_glob]

        if jobs is not None:
            logger.info("Removing jenkins job(s): %s" % ", ".join(jobs))
        for job in jobs:
            self.jenkins.delete_job(job)
            if self.cache.is_cached(job):
                self.cache.set(job, '')
        self.cache.save()

    def delete_all_jobs(self):
        jobs = self.jenkins.get_jobs()
        logger.info("Number of jobs to delete:  %d", len(jobs))
        self.jenkins.delete_all_jobs()
        # Need to clear the JJB cache after deletion
        self.cache.clear()

    @parallelize
    def changed(self, job):
        md5 = job.md5()

        changed = (self.jjb_config.builder['ignore_cache'] or
                   self.cache.has_changed(job.name, md5))
        if not changed:
            logger.debug("'{0}' has not changed".format(job.name))
        return changed

    def update_jobs(self, input_fn, jobs_glob=None, output=None,
                    n_workers=None):
        orig = time.time()

        self.parser = YamlParser(self.jjb_config, self.plugins_list)
        self.parser.load_files(input_fn)

        self.parser.expandYaml(jobs_glob)
        self.parser.generateXML()
        step = time.time()
        logging.debug('%d XML files generated in %ss',
                      len(self.parser.jobs), str(step - orig))

        logger.info("Number of jobs generated:  %d", len(self.parser.xml_jobs))
        self.parser.xml_jobs.sort(key=operator.attrgetter('name'))

        if (output and not hasattr(output, 'write')
                and not os.path.isdir(output)):
            logger.info("Creating directory %s" % output)
            try:
                os.makedirs(output)
            except OSError:
                if not os.path.isdir(output):
                    raise

        if output:
            # ensure only wrapped once
            if hasattr(output, 'write'):
                output = utils.wrap_stream(output)

            for job in self.parser.xml_jobs:
                if hasattr(output, 'write'):
                    # `output` is a file-like object
                    logger.info("Job name:  %s", job.name)
                    logger.debug("Writing XML to '{0}'".format(output))
                    try:
                        output.write(job.output())
                    except IOError as exc:
                        if exc.errno == errno.EPIPE:
                            # EPIPE could happen if piping output to something
                            # that doesn't read the whole input (e.g.: the UNIX
                            # `head` command)
                            return
                        raise
                    continue

                output_fn = os.path.join(output, job.name)
                logger.debug("Writing XML to '{0}'".format(output_fn))
                with io.open(output_fn, 'w', encoding='utf-8') as f:
                    f.write(job.output().decode('utf-8'))
            return self.parser.xml_jobs, len(self.parser.xml_jobs)

        # Filter out the jobs that did not change
        logging.debug('Filtering %d jobs for changed jobs',
                      len(self.parser.xml_jobs))
        step = time.time()
        jobs = [job for job in self.parser.xml_jobs
                if self.changed(job)]
        logging.debug("Filtered for changed jobs in %ss",
                      (time.time() - step))

        if not jobs:
            return [], 0

        # Update the jobs
        logging.debug('Updating jobs')
        step = time.time()
        p_params = [{'job': job} for job in jobs]
        results = self.parallel_update_job(
            n_workers=n_workers,
            parallelize=p_params)
        logging.debug("Parsing results")
        # generalize the result parsing, as a parallelized job always returns a
        # list
        if len(p_params) in (1, 0):
            results = [results]
        for result in results:
            if isinstance(result, Exception):
                raise result
            else:
                # update in-memory cache
                j_name, j_md5 = result
                self.cache.set(j_name, j_md5)
        # write cache to disk
        self.cache.save()
        logging.debug("Updated %d jobs in %ss",
                      len(jobs),
                      time.time() - step)
        logging.debug("Total run took %ss", (time.time() - orig))
        return jobs, len(jobs)

    @parallelize
    def parallel_update_job(self, job):
        self.jenkins.update_job(job.name, job.output().decode('utf-8'))
        return (job.name, job.md5())

    def update_job(self, input_fn, jobs_glob=None, output=None):
        logging.warning(
            'Current update_job function signature is deprecated and '
            'will change in future versions to the signature of the '
            'new parallel_update_job')
        return self.update_jobs(input_fn, jobs_glob, output)
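
A hedged driver for the Builder in Example #36: assuming jenkins-job-builder
is installed (so JJBConfig is importable) and that ./jobs contains job YAML,
passing a directory as `output` renders the XML to disk without contacting
Jenkins. The paths here are placeholders.

from jenkins_jobs.config import JJBConfig

jjb_config = JJBConfig()  # reads the default jenkins_jobs.ini if present
jjb_config.validate()

builder = Builder(jjb_config)
# With `output` set to a directory, update_jobs writes one XML file per job
# and returns without touching the Jenkins server or the cache.
xml_jobs, count = builder.update_jobs(['./jobs'], output='./out')
print("Rendered %d job(s)" % count)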
Example #37
    def test_yaml_snippet(self, mock):
        if not self.in_filename:
            return

        jjb_config = self._get_config()

        expected_xml = self._read_utf8_content()
        yaml_content = self._read_yaml_content(self.in_filename)

        plugins_info = None
        if self.plugins_info_filename:
            plugins_info = self._read_yaml_content(self.plugins_info_filename)
            self.addDetail("plugins-info-filename",
                           text_content(self.plugins_info_filename))
            self.addDetail("plugins-info", text_content(str(plugins_info)))

        parser = YamlParser(jjb_config)
        e = pkg_resources.EntryPoint.parse
        d = pkg_resources.Distribution()
        config = configparser.ConfigParser()
        config.read(os.path.dirname(__file__) + "/../setup.cfg")
        groups = {}
        for key in config["entry_points"]:
            groups[key] = list()
            for line in config["entry_points"][key].split("\n"):
                if "" == line.strip():
                    continue
                groups[key].append(e(line, dist=d))

        def mock_iter_entry_points(group, name=None):
            return (entry for entry in groups[group]
                    if name is None or name == entry.name)

        mock.side_effect = mock_iter_entry_points
        registry = ModuleRegistry(jjb_config, plugins_info)
        registry.set_parser_data(parser.data)

        pub = self.klass(registry)

        project = None
        if "project-type" in yaml_content:
            if yaml_content["project-type"] == "maven":
                project = project_maven.Maven(registry)
            elif yaml_content["project-type"] == "matrix":
                project = project_matrix.Matrix(registry)
            elif yaml_content["project-type"] == "flow":
                project = project_flow.Flow(registry)
            elif yaml_content["project-type"] == "githuborg":
                project = project_githuborg.GithubOrganization(registry)
            elif yaml_content["project-type"] == "multijob":
                project = project_multijob.MultiJob(registry)
            elif yaml_content["project-type"] == "multibranch":
                project = project_multibranch.WorkflowMultiBranch(registry)
            elif yaml_content["project-type"] == "multibranch-defaults":
                project = project_multibranch.WorkflowMultiBranchDefaults(
                    registry)  # noqa
            elif yaml_content["project-type"] == "externaljob":
                project = project_externaljob.ExternalJob(registry)

        if "view-type" in yaml_content:
            if yaml_content["view-type"] == "all":
                project = view_all.All(registry)
            elif yaml_content["view-type"] == "delivery_pipeline":
                project = view_delivery_pipeline.DeliveryPipeline(registry)
            elif yaml_content["view-type"] == "list":
                project = view_list.List(registry)
            elif yaml_content["view-type"] == "nested":
                project = view_nested.Nested(registry)
            elif yaml_content["view-type"] == "pipeline":
                project = view_pipeline.Pipeline(registry)
            elif yaml_content["view-type"] == "sectioned":
                project = view_sectioned.Sectioned(registry)
            else:
                raise InvalidAttributeError("view-type",
                                            yaml_content["view-type"])

        if project:
            xml_project = project.root_xml(yaml_content)
        else:
            xml_project = XML.Element("project")

        # Generate the XML tree directly with modules/general
        pub.gen_xml(xml_project, yaml_content)

        # check output file is under correct path
        if "name" in yaml_content:
            prefix = os.path.dirname(self.in_filename)
            # split using '/' since fullname uses URL path separator
            expected_folders = [
                os.path.normpath(
                    os.path.join(
                        prefix,
                        "/".join(
                            parser._getfullname(yaml_content).split("/")[:-1]),
                    ))
            ]
            actual_folders = [os.path.dirname(f) for f in self.out_filenames]

            self.assertEqual(
                expected_folders,
                actual_folders,
                "Output file under wrong path, was '%s', should be '%s'" % (
                    self.out_filenames[0],
                    os.path.join(expected_folders[0],
                                 os.path.basename(self.out_filenames[0])),
                ),
            )

        # Prettify generated XML
        pretty_xml = XmlJob(xml_project, "fixturejob").output().decode("utf-8")

        self.assertThat(
            pretty_xml,
            testtools.matchers.DocTestMatches(
                expected_xml, doctest.ELLIPSIS | doctest.REPORT_NDIFF),
        )
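
The test above fakes plugin discovery by parsing setup.cfg into
pkg_resources.EntryPoint objects and patching iter_entry_points to serve
them. A minimal illustration of the parsing half; the entry-point line shown
is illustrative, not copied from setup.cfg.

import pkg_resources

dist = pkg_resources.Distribution()
ep = pkg_resources.EntryPoint.parse(
    "shell = jenkins_jobs.modules.builders:shell", dist=dist)

print(ep.name)         # 'shell' -- the name iter_entry_points filters on
print(ep.module_name)  # 'jenkins_jobs.modules.builders'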
Example #38
class Builder(object):
    def __init__(self,
                 jenkins_url,
                 jenkins_user,
                 jenkins_password,
                 config=None,
                 ignore_cache=False,
                 flush_cache=False,
                 plugins_list=None):
        self.jenkins = Jenkins(jenkins_url, jenkins_user, jenkins_password)
        self.cache = CacheStorage(jenkins_url, flush=flush_cache)
        self.global_config = config
        self.ignore_cache = ignore_cache
        self._plugins_list = plugins_list

    @property
    def plugins_list(self):
        if self._plugins_list is None:
            self._plugins_list = self.jenkins.get_plugins_info()
        return self._plugins_list

    def load_files(self, fn):
        self.parser = YamlParser(self.global_config, self.plugins_list)

        # handle deprecated behavior
        if not hasattr(fn, '__iter__'):
            logger.warning(
                'Passing single elements for the `fn` argument in '
                'Builder.load_files is deprecated. Please update your code '
                'to use a list as support for automatic conversion will be '
                'removed in a future version.')
            fn = [fn]

        files_to_process = []
        for path in fn:
            if os.path.isdir(path):
                files_to_process.extend([
                    os.path.join(path, f) for f in os.listdir(path)
                    if (f.endswith('.yml') or f.endswith('.yaml'))
                ])
            else:
                files_to_process.append(path)

        # symlinks used to allow loading of sub-dirs can result in duplicate
        # definitions of macros and templates when loading all from top-level
        unique_files = []
        for f in files_to_process:
            rpf = os.path.realpath(f)
            if rpf not in unique_files:
                unique_files.append(rpf)
            else:
                logger.warning("File '%s' already added as '%s', ignoring "
                               "reference to avoid duplicating yaml "
                               "definitions." % (f, rpf))

        for in_file in unique_files:
            # ask-for-permission is used instead of ask-for-forgiveness
            # here since it performs better for this low-frequency case.
            if hasattr(in_file, 'name'):
                fname = in_file.name
            else:
                fname = in_file
            logger.debug("Parsing YAML file {0}".format(fname))
            if hasattr(in_file, 'read'):
                self.parser.parse_fp(in_file)
            else:
                self.parser.parse(in_file)

    def delete_old_managed(self, keep):
        jobs = self.jenkins.get_jobs()
        deleted_jobs = 0
        for job in jobs:
            if job['name'] not in keep and \
                    self.jenkins.is_managed(job['name']):
                logger.info("Removing obsolete jenkins job {0}".format(
                    job['name']))
                self.delete_job(job['name'])
                deleted_jobs += 1
            else:
                logger.debug("Ignoring unmanaged jenkins job %s", job['name'])
        return deleted_jobs

    def delete_job(self, jobs_glob, fn=None):
        if fn:
            self.load_files(fn)
            self.parser.expandYaml([jobs_glob])
            jobs = [j['name'] for j in self.parser.jobs]
        else:
            jobs = [jobs_glob]

        if jobs is not None:
            logger.info("Removing jenkins job(s): %s" % ", ".join(jobs))
        for job in jobs:
            self.jenkins.delete_job(job)
            if self.cache.is_cached(job):
                self.cache.set(job, '')

    def delete_all_jobs(self):
        jobs = self.jenkins.get_jobs()
        logger.info("Number of jobs to delete:  %d", len(jobs))
        for job in jobs:
            self.delete_job(job['name'])

    def update_job(self, input_fn, jobs_glob=None, output=None):
        self.load_files(input_fn)
        self.parser.expandYaml(jobs_glob)
        self.parser.generateXML()

        logger.info("Number of jobs generated:  %d", len(self.parser.xml_jobs))
        self.parser.xml_jobs.sort(key=operator.attrgetter('name'))

        if (output and not hasattr(output, 'write')
                and not os.path.isdir(output)):
            logger.info("Creating directory %s" % output)
            try:
                os.makedirs(output)
            except OSError:
                if not os.path.isdir(output):
                    raise

        updated_jobs = 0
        for job in self.parser.xml_jobs:
            if output:
                if hasattr(output, 'write'):
                    # `output` is a file-like object
                    logger.info("Job name:  %s", job.name)
                    logger.debug("Writing XML to '{0}'".format(output))
                    try:
                        output.write(job.output())
                    except IOError as exc:
                        if exc.errno == errno.EPIPE:
                            # EPIPE could happen if piping output to something
                            # that doesn't read the whole input (e.g.: the UNIX
                            # `head` command)
                            return
                        raise
                    continue

                output_fn = os.path.join(output, job.name)
                logger.debug("Writing XML to '{0}'".format(output_fn))
                with io.open(output_fn, 'w', encoding='utf-8') as f:
                    f.write(job.output().decode('utf-8'))
                continue
            md5 = job.md5()
            if (self.jenkins.is_job(job.name)
                    and not self.cache.is_cached(job.name)):
                old_md5 = self.jenkins.get_job_md5(job.name)
                self.cache.set(job.name, old_md5)

            if self.cache.has_changed(job.name, md5) or self.ignore_cache:
                self.jenkins.update_job(job.name, job.output())
                updated_jobs += 1
                self.cache.set(job.name, md5)
            else:
                logger.debug("'{0}' has not changed".format(job.name))
        return self.parser.xml_jobs, updated_jobs
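
Both update loops guard stream writes against EPIPE so that piping the XML
into a command like `head` exits quietly instead of raising. The same
pattern in isolation; write_all is a hypothetical helper, not part of the
library.

import errno
import sys


def write_all(chunks, stream=sys.stdout):
    for chunk in chunks:
        try:
            stream.write(chunk)
        except IOError as exc:
            if exc.errno == errno.EPIPE:
                # The reader closed the pipe early; stop writing quietly.
                return
            raise


write_all("line %d\n" % i for i in range(5))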
Example #39
    def update_jobs(self, input_fn, jobs_glob=None, output=None,
                    n_workers=None):
        orig = time.time()

        self.parser = YamlParser(self.jjb_config, self.plugins_list)
        self.parser.load_files(input_fn)

        self.parser.expandYaml(jobs_glob)
        self.parser.generateXML()
        step = time.time()
        logging.debug('%d XML files generated in %ss',
                      len(self.parser.jobs), str(step - orig))

        logger.info("Number of jobs generated:  %d", len(self.parser.xml_jobs))
        self.parser.xml_jobs.sort(key=operator.attrgetter('name'))

        if (output and not hasattr(output, 'write')
                and not os.path.isdir(output)):
            logger.info("Creating directory %s" % output)
            try:
                os.makedirs(output)
            except OSError:
                if not os.path.isdir(output):
                    raise

        if output:
            # ensure only wrapped once
            if hasattr(output, 'write'):
                output = utils.wrap_stream(output)

            for job in self.parser.xml_jobs:
                if hasattr(output, 'write'):
                    # `output` is a file-like object
                    logger.info("Job name:  %s", job.name)
                    logger.debug("Writing XML to '{0}'".format(output))
                    try:
                        output.write(job.output())
                    except IOError as exc:
                        if exc.errno == errno.EPIPE:
                            # EPIPE could happen if piping output to something
                            # that doesn't read the whole input (e.g.: the UNIX
                            # `head` command)
                            return
                        raise
                    continue

                output_fn = os.path.join(output, job.name)
                logger.debug("Writing XML to '{0}'".format(output_fn))
                with io.open(output_fn, 'w', encoding='utf-8') as f:
                    f.write(job.output().decode('utf-8'))
            return self.parser.xml_jobs, len(self.parser.xml_jobs)

        # Filter out the jobs that did not change
        logging.debug('Filtering %d jobs for changed jobs',
                      len(self.parser.xml_jobs))
        step = time.time()
        jobs = [job for job in self.parser.xml_jobs
                if self.changed(job)]
        logging.debug("Filtered for changed jobs in %ss",
                      (time.time() - step))

        if not jobs:
            return [], 0

        # Update the jobs
        logging.debug('Updating jobs')
        step = time.time()
        p_params = [{'job': job} for job in jobs]
        results = self.parallel_update_job(
            n_workers=n_workers,
            parallelize=p_params)
        logging.debug("Parsing results")
        # generalize the result parsing, as a parallelized job always returns a
        # list
        if len(p_params) in (1, 0):
            results = [results]
        for result in results:
            if isinstance(result, Exception):
                raise result
            else:
                # update in-memory cache
                j_name, j_md5 = result
                self.cache.set(j_name, j_md5)
        # write cache to disk
        self.cache.save()
        logging.debug("Updated %d jobs in %ss",
                      len(jobs),
                      time.time() - step)
        logging.debug("Total run took %ss", (time.time() - orig))
        return jobs, len(jobs)
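
parallel_update_job above relies on JJB's @parallelize decorator to fan the
per-job uploads out over a worker pool and collect (name, md5) results. A
rough standard-library equivalent of that fan-out/collect step; update_one
and the job names are placeholders for the real jenkins.update_job calls.

from concurrent.futures import ThreadPoolExecutor


def update_one(job_name):
    # Stand-in for self.jenkins.update_job(job.name, job.output()).
    return (job_name, "md5-of-%s" % job_name)


jobs = ["job-a", "job-b", "job-c"]
with ThreadPoolExecutor(max_workers=2) as pool:
    results = list(pool.map(update_one, jobs))

for name, md5 in results:
    print("cache %s -> %s" % (name, md5))  # mirrors cache.set(j_name, j_md5)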
Example #40
    def test_yaml_snippet(self):
        if not self.in_filename:
            return

        jjb_config = self._get_config()

        expected_xml = self._read_utf8_content()
        yaml_content = self._read_yaml_content(self.in_filename)

        plugins_info = None
        if self.plugins_info_filename:
            plugins_info = self._read_yaml_content(self.plugins_info_filename)
            self.addDetail("plugins-info-filename",
                           text_content(self.plugins_info_filename))
            self.addDetail("plugins-info",
                           text_content(str(plugins_info)))

        parser = YamlParser(jjb_config)
        registry = ModuleRegistry(jjb_config, plugins_info)
        registry.set_parser_data(parser.data)

        pub = self.klass(registry)

        project = None
        if 'project-type' in yaml_content:
            if yaml_content['project-type'] == "maven":
                project = project_maven.Maven(registry)
            elif yaml_content['project-type'] == "matrix":
                project = project_matrix.Matrix(registry)
            elif yaml_content['project-type'] == "flow":
                project = project_flow.Flow(registry)
            elif yaml_content['project-type'] == "multijob":
                project = project_multijob.MultiJob(registry)
            elif yaml_content['project-type'] == "multibranch":
                project = project_multibranch.WorkflowMultiBranch(registry)
            elif yaml_content['project-type'] == "multibranch-defaults":
                project = project_multibranch.WorkflowMultiBranchDefaults(
                    registry)  # noqa
            elif yaml_content['project-type'] == "externaljob":
                project = project_externaljob.ExternalJob(registry)

        if 'view-type' in yaml_content:
            if yaml_content['view-type'] == "all":
                project = view_all.All(None)
            elif yaml_content['view-type'] == "list":
                project = view_list.List(None)
            elif yaml_content['view-type'] == "pipeline":
                project = view_pipeline.Pipeline(None)
            else:
                raise InvalidAttributeError(
                    'view-type', yaml_content['view-type'])

        if project:
            xml_project = project.root_xml(yaml_content)
        else:
            xml_project = XML.Element('project')

        # Generate the XML tree directly with modules/general
        pub.gen_xml(xml_project, yaml_content)

        # check output file is under correct path
        if 'name' in yaml_content:
            prefix = os.path.dirname(self.in_filename)
            # split using '/' since fullname uses URL path separator
            expected_folders = [os.path.normpath(
                os.path.join(prefix,
                             '/'.join(parser._getfullname(yaml_content).
                                      split('/')[:-1])))]
            actual_folders = [os.path.dirname(f) for f in self.out_filenames]

            self.assertEqual(
                expected_folders, actual_folders,
                "Output file under wrong path, was '%s', should be '%s'" %
                (self.out_filenames[0],
                 os.path.join(expected_folders[0],
                              os.path.basename(self.out_filenames[0]))))

        # Prettify generated XML
        pretty_xml = XmlJob(xml_project, 'fixturejob').output().decode('utf-8')

        self.assertThat(
            pretty_xml,
            testtools.matchers.DocTestMatches(expected_xml,
                                              doctest.ELLIPSIS |
                                              doctest.REPORT_NDIFF)
        )
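
The tests close by comparing the prettified XML against the fixture with
testtools' DocTestMatches, which lets fixtures use doctest's `...` ellipsis
for volatile fragments. A self-contained sketch of the matcher; the XML
strings here are invented.

import doctest

from testtools.matchers import DocTestMatches

expected = "<project>...<description>...</description>...</project>\n"
actual = ("<project>\n  <actions/>\n"
          "  <description>built by JJB</description>\n</project>\n")

matcher = DocTestMatches(expected, doctest.ELLIPSIS)
print(matcher.match(actual))  # None means the texts matched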