    def test_check_values_respects_other_violations(self):
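        # An exclusion note raises key1's allowed value to 15, but key2 still
        # rises from 10 to 12 against its FALLING direction, so the check
        # must refuse to proceed.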
        processor = MetricsProcessor()

        args = Mock(non_interactive=True)
        git = Mock()

        git.expect_latest_annotated_commit_with_details(
            STATS_REF_NAME,
            toReturn={
                'note': '{"metrics":{"key1": {"value": 10, "description": ""},'
                '"key2": {"value": 10, "description": ""}}}',
                'hash': '123456',
                'commiter': 'me'
            })
        exclusion = {
            'note':
            '{"committer": "testbot", "reason": "none", "exclusion": {"key1": 15}}'
        }
        git.expect_generate_annotated_commits_with_details(
            STATS_EXCLUSION_REF_NAME,
            commit_range='123456..HEAD',
            toReturn=[exclusion])

        metrics = DataBean()
        metrics.metrics = {
            'key1':
            DataBean(value=8, description='', checked=True, direction=FALLING),
            'key2':
            DataBean(value=12, description='', checked=True, direction=FALLING)
        }

        self.assertEqual(
            Callable.do_not_proceed,
            processor.check_values(args, metrics, git,
                                   MockFileUtils())(Mock()))

    def test_check_values_rising(self):
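        # key1 is declared RISING but drops from 20 to 10, so the check must
        # not proceed.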
        processor = MetricsProcessor()

        args = Mock(non_interactive=True)
        git = Mock()

        git.expect_latest_annotated_commit_with_details(
            STATS_REF_NAME,
            toReturn={
                'note':
                '{"metrics":{"key1": {"value": 20, "description": ""}}}',
                'hash': 'abcdef',
                'commiter': 'me'
            })
        git.expect_generate_annotated_commits_with_details(
            STATS_EXCLUSION_REF_NAME, commit_range='abcdef..HEAD', toReturn=[])

        metrics = DataBean()
        metrics.metrics = {
            'key1':
            DataBean(value=10, description='', checked=True, direction=RISING)
        }

        self.assertEqual(
            Callable.do_not_proceed,
            processor.check_values(args, metrics, git,
                                   MockFileUtils())(Mock()))

    def test_check_values_generates_tests_report_when_no_previous_metrics(
            self):
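        # With no previous stats recorded in git, every metric is reported as
        # a success and the check passes.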
        test_suite_logger = Mock()
        test_suite_logger_factory_mock = Mock().ordered_new_logger(
            "ehMetrics", toReturn=test_suite_logger)

        test_suite_logger.ordered_success('key2',
                                          0,
                                          'EHMetrics',
                                          toReturn=None)
        test_suite_logger.ordered_success('key3',
                                          0,
                                          'EHMetrics',
                                          toReturn=None)
        test_suite_logger.ordered_save(toReturn=None)

        processor = MetricsProcessor(test_suite_logger_factory_mock)

        metrics = DataBean(
            metrics={
                'key2':
                DataBean(
                    value=10, description='', checked=True, direction=FALLING),
                'key3':
                DataBean(
                    value=10, description='', checked=True, direction=FALLING)
            })

        args = Mock(non_interactive=True)
        self.assertEqual(
            Callable.success,
            processor.check_values(args, metrics, Mock(),
                                   MockFileUtils())(Mock()))

        test_suite_logger.verify_all_ordered()

    def test_check_values_generates_tests_report_with_previous_metrics(self):
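        # key2 increased from 9 to 10 against its FALLING direction, so the
        # report must record a failure for it; key3 has no previous value and
        # is reported as a success.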
        test_suite_logger = Mock()
        test_suite_logger_factory_mock = Mock().ordered_new_logger(
            "ehMetrics", toReturn=test_suite_logger)

        test_suite_logger.ordered_failed(
            'key2',
            0,
            'EHMetrics',
            'metric key2 () increased from 9 to 10.',
            toReturn=None)
        test_suite_logger.ordered_success('key3',
                                          0,
                                          'EHMetrics',
                                          toReturn=None)
        test_suite_logger.ordered_save(toReturn=None)

        processor = MetricsProcessor(test_suite_logger_factory_mock)

        metrics = DataBean(
            metrics={
                'key2':
                DataBean(
                    value=10, description='', checked=True, direction=FALLING),
                'key3':
                DataBean(
                    value=10, description='', checked=True, direction=FALLING)
            })

        git = Mock()
        git.ordered_latest_annotated_commit_with_details(
            STATS_REF_NAME,
            toReturn={
                'note':
                '{"metrics":{"key1": {"value": 10, "description": ""}, "key2": {"value": 9, "description": ""}}}',
                'hash': '123456',
                'commiter': 'me'
            })
        git.ordered_generate_annotated_commits_with_details(
            STATS_EXCLUSION_REF_NAME, commit_range='123456..HEAD', toReturn=[])

        args = Mock(non_interactive=True)
        self.assertEqual(
            Callable.do_not_proceed,
            processor.check_values(args, metrics, git,
                                   MockFileUtils())(Mock()))

        git.verify_all_ordered()
        test_suite_logger.verify_all_ordered()

    def test_metrics_processor_does_not_call_prepare_module_when_all_collectors_are_filtered_out(
            self):
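        # args.matching filters collectors by key: 'filtered' does not match
        # 'test', so the first module description must never have its
        # environment prepared, while 'testing' matches and proceeds.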
        processor = MetricsProcessor()

        logger = Mock()

        module = 'module1'

        collector = Mock(key='filtered')

        module_description = Mock()
        module_description.expect_get_collectors(toReturn=[collector])

        collector2 = Mock(key='testing')
        collector2.expect_get_values(toReturn={'value1': 23})

        module_description2 = Mock()
        module_description2.expect_get_collectors(toReturn=[collector2])
        module_description2.expect_measured_modules(toReturn=[module])
        module_description2.expect_files(module, toReturn=[])

        module_descriptions = [module_description, module_description2]

        args = Mock(matching='test')

        result = DataBean()

        processor.process_metrics(args, module_descriptions, result)(logger)

        self.assertFalse(module_description.verify_prepare_environment(logger))
        self.assertTrue(module_description2.verify_prepare_environment(logger))

    def test_check_values_with_exclusions(self):
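        # key1 rises from 10 to 12 against its FALLING direction, but an
        # exclusion note allows values up to 15, so the check succeeds.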
        processor = MetricsProcessor()

        args = Mock(non_interactive=True)
        git = Mock()

        git.expect_latest_annotated_commit_with_details(
            STATS_REF_NAME,
            toReturn={
                'note': '{"metrics":{"key1": {"value": 10, "description": ""}}}',
                'hash': '123456',
                'commiter': 'me'
            })
        exclusion = {
            'note':
            '{"committer": "testbot", "reason": "none", "exclusion": {"key1": 15}}'
        }
        git.expect_generate_annotated_commits_with_details(
            STATS_EXCLUSION_REF_NAME,
            commit_range='123456..HEAD',
            toReturn=[exclusion])

        metrics = DataBean()
        metrics.metrics = {
            'key1':
            DataBean(value=12, description='', checked=True, direction=FALLING)
        }

        self.assertEqual(
            Callable.success,
            processor.check_values(args, metrics, git,
                                   MockFileUtils())(Mock()))

    def test_check_values_ignores_additional_and_missing_keys(self):
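        # key1 exists only in the previous stats and key3 only in the current
        # ones; the sole comparable metric, key2, is unchanged, so the check
        # succeeds.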
        processor = MetricsProcessor()

        args = Mock(non_interactive=True)
        git = Mock()

        git.expect_latest_annotated_commit_with_details(
            STATS_REF_NAME,
            toReturn={
                'note':
                '{"metrics":{"key1": {"value": 20, "description": ""}, '
                '"key2": {"value": 10, "description": ""}}}',
                'hash': '123efef',
                'commiter': 'me'
            })
        git.expect_generate_annotated_commits_with_details(
            STATS_EXCLUSION_REF_NAME, commit_range='123efef..HEAD', toReturn=[])

        metrics = DataBean()
        metrics.metrics = {
            'key2':
            DataBean(value=10, description='', checked=True, direction=FALLING),
            'key3':
            DataBean(value=10, description='', checked=True, direction=FALLING)
        }

        self.assertEqual(
            Callable.success,
            processor.check_values(args, metrics, git,
                                   MockFileUtils())(Mock()))

    def test_check_values_with_unchecked(self):
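        # key1 falls from 10 to 8 despite its RISING direction, but it is not
        # checked, so the violation is ignored and the check succeeds.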
        processor = MetricsProcessor()

        args = Mock(non_interactive=True)
        git = Mock()

        git.expect_latest_annotated_commit_with_details(
            STATS_REF_NAME,
            toReturn={
                'note':
                '{"metrics":{"key1": {"value": 10, "description": ""}, '
                '"key2": {"value": 10, "description": ""}}}',
                'hash': '123456',
                'commiter': 'me'
            })
        git.expect_generate_annotated_commits_with_details(
            STATS_EXCLUSION_REF_NAME, commit_range='123456..HEAD', toReturn=[])

        metrics = DataBean()
        metrics.metrics = {
            'key1':
            DataBean(value=8, description='', checked=False, direction=RISING)
        }

        self.assertEqual(
            Callable.success,
            processor.check_values(args, metrics, git,
                                   MockFileUtils())(Mock()))
Example #12
        def generate_parsers_from_workspace():
            jira_workspace_dir = workspace_utils.get_jira_workspace_dir(
                file_utils)
            workspace_projects = workspace_utils.get_workspace_projects(
                file_utils)

            for parser in (pom_parser_class_object(
                    os.sep.join([jira_workspace_dir, project, "pom.xml"]))
                           for project in workspace_projects):
                if parser.get_artifact_id() == 'jira-project':
                    # jira-project is often referred to via the jira.version property and not jira.project.version;
                    # as this is our main project, allow an exception here and set both properties for now
                    # (a mapping file for that seems overkill for now)
                    mock_parser = DataBean()
                    mock_parser.get_artifact_id = lambda: 'jira'
                    # bind the method directly instead of a lambda, avoiding late binding of the loop variable
                    mock_parser.get_version = parser.get_version
                    yield mock_parser
                yield parser
Example #13
        def generate_jira_core_sources_closure(log: Logger):
            workspace_args = DataBean()
            workspace_args.with_workspace = args.with_workspace

            # attempt to compile jira-core in offline mode - assume jira-api is installed.
            jira_core_gen_src = MavenCallable(workspace_args).phase(
                'generate-sources').skip_tests().project(
                    'jira-components/jira-core').option('-o')
            jira_core_gen_src(log)
            if jira_core_gen_src.returncode == Callable.success:
                return jira_core_gen_src.returncode
            else:
                # something failed: maybe jira-api was not actually installed? fix this:
                log.warn(
                    'Generate sources failed for jira-core. Will attempt to compile and install jira-api before giving up.'
                )
                jira_api_install = MavenCallable(workspace_args).phase(
                    'install').skip_tests().project('jira-components/jira-api')
                jira_api_install(log)
                if jira_api_install.returncode != Callable.success:
                    return jira_api_install.returncode
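                # jira-api is installed now; clear the recorded returncode so the callable can run again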
                jira_core_gen_src.returncode = None
                jira_core_gen_src(log)
                return jira_core_gen_src.returncode
Example #14
    def produce_result(self,
                       value=None,
                       description=None,
                       checked=None,
                       direction=None):
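        """Build a new DataBean for this metric, overriding only the fields that are explicitly provided."""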

        value = self.value if value is None else value
        description = self.description if description is None else description
        checked = self.checked if checked is None else checked
        direction = self.direction if direction is None else direction

        return DataBean(value=value,
                        description=description,
                        checked=checked,
                        direction=direction)

    def test_metrics_processor(self):
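        # End-to-end run: the collector accepts file 'a' only, consumes its
        # single line, and its values end up in the result under the
        # 'jira.stats.' prefix together with the build number.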
        processor = MetricsProcessor()

        logger = Mock()

        module = 'module1'

        collector = Mock()
        collector.expect_get_values(toReturn={'value1': 23})
        collector.expect_wants_file('a', toReturn=True)
        collector.expect_wants_file('b', toReturn=False)
        collector.expect_on_read_line('aline', toReturn=False)

        file_a = MockFile()
        file_a.expect_readline(toReturn='aline')

        module_description = Mock()
        module_description.expect_get_collectors(toReturn=[collector])
        module_description.expect_measured_modules(toReturn=[module])
        module_description.expect_files(module, toReturn=['a', 'b'])
        module_description.expect_open_file('a', toReturn=file_a)

        module_descriptions = [module_description]

        args = Mock(matching=None, buildno='ORLY-9')

        result = DataBean()

        processor.process_metrics(args, module_descriptions, result)(logger)

        self.assertTrue(module_description.verify_prepare_environment(logger))
        self.assertTrue(collector.verify_pre_files_scan(module))
        self.assertTrue(collector.verify_on_read_line('aline'))
        self.assertEqual(1, collector.callcount_on_read_line())
        self.assertTrue(collector.verify_post_files_scan(module))
        self.assertDictEqual({'jira.stats.value1': 23}, result.metrics)
        self.assertEqual('ORLY-9', result.build_number)
Example #16
    def __call__(self, args, executor: CommandExecutor):
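        # make sure a remote exists before anything below fetches or pushes notes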
        def check_remotes(log):
            if len(self.git.get_remotes()) == 0:
                self.set_remote(log)
            return Callable.success

        executor.append(check_remotes)

        if not args.fast:
            executor.append(
                lambda log: Callable.success
                if self.git.fetch_notes('*') == 0 else log.error(
                    'FATAL: git: Failure to fetch notes from origin.') or
                Callable.do_not_proceed)

        if args.branch:

            def branch_check(logger):
                current_branch = self.git.current_branch()
                if not current_branch == args.branch:
                    logger.error(
                        'Branch check failed. You seem to be on "%s"; switch to "%s" first!'
                        % (current_branch, args.branch))
                    return Callable.do_not_proceed
                else:
                    return Callable.success

            executor.append(branch_check)

        def check_workspace(log: Logger):
            if args.note or not args.non_interactive:
                if not self.git.is_clean_workspace():
                    if args.note:
                        log.error(
                            'I cannot write notes with local changes. Commit your work first, so that notes can '
                            'be attached to your commit.')
                        return Callable.do_not_proceed
                    else:
                        log.warn(
                            'You have uncommitted changes - if engineering health metrics are increased, you will '
                            'not be able to add an exclusion note for the build.'
                        )
            return Callable.success

        executor.append(check_workspace)

        def clean_logs(log: Logger):
            if self.fs.dir_exists(MetricsCollector.log_directory):
                log.debug('Removing directory: %s' %
                          MetricsCollector.log_directory)
                self.fs.remove_dir(MetricsCollector.log_directory)
            return Callable.success

        executor.append(clean_logs)

        def record_commit(log: Logger):
            self.fs.write_lines(
                os.sep.join([
                    self.fs.existing_dir(MetricsCollector.log_directory),
                    '.commit'
                ]), [self.git.current_commit()])
            return Callable.success

        executor.append(record_commit)

        metrics = DataBean()

        modules_descriptions = [
            JIRADirectoryScanModulesDescription(args.fast, file_utils=self.fs),
            BundledPluginsModulesDescription(args.fast),
            JIRATestsModulesDescription(args.fast)
        ]

        executor.append(
            self.metrics_processor.process_metrics(args, modules_descriptions,
                                                   metrics))
        executor.append(
            self.metrics_processor.generate_report(metrics, self.fs, self.git))
        executor.append(
            self.metrics_processor.check_values(args, metrics, self.git,
                                                self.fs))

        if args.note:
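            # store the collected metrics as a git note on HEAD and push them to the stats ref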
            executor.append(lambda log: self.git.set_user(
                'jmake stats runner', '*****@*****.**'))
            executor.append(lambda log: self.git.put_notes(
                self.json_writer.as_str(metrics), STATS_REF_NAME, 'HEAD', True)
                            )
            executor.append(lambda log: self.git.push_notes(STATS_REF_NAME))

    def test_generate_report(self):
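        # Two historical notes plus the current metrics must be serialized as
        # data points sorted by date, with the current build labelled
        # 'current', and written into the report's data.js.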
        git = Mock().expect_generate_annotated_commits_with_details(
            "jira-stats",
            toReturn=[
                {
                    'hash':
                    '12121212',
                    'shorthash':
                    '1212',
                    'commiter':
                    'Mark <unit@tester>',
                    'date':
                    1371114910,
                    'note':
                    '{"metrics": {"test.metric": 1}, "build_number": "BN-2"}'
                },
                {
                    'hash':
                    '58585858',
                    'shorthash':
                    '5858',
                    'commiter':
                    'Frank <frank@localhost>',
                    'date':
                    1371111910,
                    'note':
                    '{"metrics": {"test.metric": 3}, "build_number": "BN-1"}'
                },
            ]).ordered_get_commit_details("jira-stats",
                                          toReturn={
                                              'hash': '34343434',
                                              'shorthash': '3434',
                                              'commiter':
                                              'Unit Tester <unit@tester>',
                                              'date': 1371114916,
                                              'note': ''
                                          })

        json_writer = Mock().ordered_as_str(
            {
                'points': [{
                    'metrics': {
                        'test.metric': 3
                    },
                    'commits': [{
                        'hash': '58585858',
                        'shorthash': '5858',
                        'commiter': 'Frank <frank@localhost>',
                        'date': 1371111910
                    }],
                    'date':
                    1371111910,
                    'build_number':
                    'BN-1'
                }, {
                    'metrics': {
                        'test.metric': 1
                    },
                    'commits': [{
                        'hash': '12121212',
                        'shorthash': '1212',
                        'commiter': 'Mark <unit@tester>',
                        'date': 1371114910
                    }],
                    'date':
                    1371114910,
                    'build_number':
                    'BN-2'
                }, {
                    'metrics': {
                        'test.metric': 2
                    },
                    'commits': [{
                        'date': 1371114916,
                        'commiter': 'Unit Tester <unit@tester>',
                        'shorthash': '3434',
                        'hash': '34343434'
                    }],
                    'date':
                    1371114916,
                    'build_number':
                    'current'
                }]
            },
            toReturn='__JSON__')

        file_utils = MockFileUtils()
        log = Mock()

        current_metrics = DataBean(metrics={"test.metric": 2})

        processor = MetricsProcessor()
        generate_report_closure = processor.generate_report(
            current_metrics, file_utils, git, json_writer)
        generate_report_closure(log)
        self.assertTrue(
            file_utils.verify_write_lines(
                os.sep.join(['target', 'eh-metrics-report', 'js', 'data.js']),
                [
                    '(function() { var data = __JSON__; executeReport(data); })();'
                ]))