Example #1
0
    def test_file_and_size(self):
        """FileCountAndSize must tally matching files and their cumulative
        size via file_utils, and must never ask to open files itself."""

        # Use a def instead of a name-bound lambda (PEP 8) and avoid
        # shadowing the builtin `filter`.
        def name_filter(x):
            return x.startswith('t')

        m = MockFileUtils()
        for word, size in TestMetrics.numbers:
            m.expect_file_size(word, toReturn=size)

        obj = FileCountAndSize('test',
                               name_filter,
                               file_utils=m,
                               metrics_logger=Mock()).configure(Mock(), False)

        obj.pre_files_scan('test-module')
        for word, size in TestMetrics.numbers:
            self.assertFalse(
                obj.wants_file(word),
                'File And Size Count metrics cannot ask to open files.')
        obj.post_files_scan('test-module')

        # Compute expected totals independently of the object under test
        # (renamed to avoid shadowing the builtin `bytes`).
        expected_files = 0
        expected_bytes = 0
        for word, size in TestMetrics.numbers:
            if name_filter(word):
                expected_bytes += size
                expected_files += 1

        self.assertEqual(expected_files, obj.get_values()['test.count'].value)
        self.assertEqual(expected_bytes,
                         obj.get_values()['test.cumulativesize'].value)
Example #2
0
 def test_clean_removes_old_home(self):
     """With mvn_clean set, running horde must remove the old horde home dir."""
     self.args.mvn_clean = True
     file_utils = MockFileUtils()
     # Canned check_output so the runner's Maven/Java version probe succeeds.
     process_utils = Mock().default_check_output('Maven:3.3.2\nJava home:jdk_home\n:Java version 1.2.3_123'.encode())
     horde_runner = MockHordeRunner(self.args, file_utils, process_utils, self.horde_status_checker.not_running())
     horde_runner(Mock())
     self.assertTrue(file_utils.verify_remove_dir(self.horde_layout.horde_home_dir(False)))
Example #3
0
 def test_horde_skeleton_is_not_copied_when_directory_exists(self):
     """Skeleton is copied into horde home, but the home dir is never removed."""
     file_utils = MockFileUtils()
     # Canned check_output so the runner's Maven/Java version probe succeeds.
     process_utils = Mock().default_check_output('Maven:3.3.2\nJava home:jdk_home\n:Java version 1.2.3_123'.encode())
     horde_runner = MockHordeRunner(self.args, file_utils, process_utils, self.horde_status_checker.not_running())
     horde_runner(Mock())
     self.assertTrue(file_utils.verify_copy_tree(self.horde_layout.horde_skeleton_dir(),
                                                 self.horde_layout.horde_home_dir(False)))
     self.assertFalse(file_utils.verify_remove_dir(self.horde_layout.horde_home_dir(False)))
 def test_downloading_skipped_when_tomcat_in_place(self):
     """The download task must not execute when the tomcat dir already exists."""
     #having
     mock_file_utils = MockFileUtils()
     tomcat_downloader = MockTomcatDownloader(self.args, file_utils=mock_file_utils)
     mock_file_utils.expect_file_exists(self.layout.tomcat_dir(False), toReturn=True)
     #when
     tomcat_downloader(Mock())
     #then
     self.assertFalse(tomcat_downloader.executed, 'Downloading task was run')
Example #5
0
 def test_downloading_skipped_when_tomcat_in_place(self):
     """Downloading must be skipped when tomcat is already unpacked."""
     # given: the tomcat directory already exists on disk
     fs = MockFileUtils()
     fs.expect_file_exists(self.layout.tomcat_dir(False), toReturn=True)
     downloader = MockTomcatDownloader(self.args, file_utils=fs)
     # when
     downloader(Mock())
     # then
     self.assertFalse(downloader.executed, 'Downloading task was run')
Example #6
0
 def test_clean_removes_old_home(self):
     """With mvn_clean set, running horde must remove the old horde home dir."""
     self.args.mvn_clean = True
     file_utils = MockFileUtils()
     # Canned check_output so the runner's Maven/Java version probe succeeds.
     process_utils = Mock().default_check_output(
         'Maven:3.3.2\nJava home:jdk_home\n:Java version 1.2.3_123'.encode(
         ))
     horde_runner = MockHordeRunner(self.args, file_utils, process_utils,
                                    self.horde_status_checker.not_running())
     horde_runner(Mock())
     self.assertTrue(
         file_utils.verify_remove_dir(
             self.horde_layout.horde_home_dir(False)))
Example #7
0
    def test_process_local_test_settings(self):
        """All three localtest.properties files are (re)filtered from the
        template with port, context and a module-specific xml location."""
        fileutils = MockFileUtils()

        xml_location_key = '${test.xml.location}'
        file1 = os.sep.join(['.', 'jira-func-tests', 'src', 'main', 'resources', 'localtest.properties'])
        file2 = os.sep.join(['.', 'jira-webdriver-tests', 'src', 'main', 'resources', 'localtest.properties'])
        file3 = os.sep.join(
            ['.', 'jira-distribution', 'jira-integration-tests', 'src', 'main', 'resources', 'localtest.properties'])
        template = os.sep.join(['jira-func-tests', 'src', 'main', 'resources', 'localtest.template'])
        # file2/file3 already exist; file1 does not — all must still be written.
        fileutils.expect_file_exists(file2, toReturn=True)
        fileutils.expect_file_exists(file3, toReturn=True)

        args = MocArgs()
        args.port = 98765
        args.jira_context = 'substituteThis'
        process_local_test_settings(args, fileutils)(Mock())

        # overwrite all each time:
        filtered_files = fileutils.filtered_files
        self.assertEqual(3, len(filtered_files))

        # Every file gets port, context and an xml-location substitution.
        for f in [file1, file2, file3]:
            rpl = fileutils.verify_filter_file(template, f)
            self.assertIsNotNone(rpl)
            self.assertIn('${jira.port}', rpl)
            self.assertEqual(str(args.port), rpl['${jira.port}'])
            self.assertIn("${jira.context}", rpl)
            self.assertEqual(args.jira_context, rpl['${jira.context}'])
            self.assertIn(xml_location_key, rpl)

        self.assertIn('jira-func-tests', fileutils.verify_filter_file(template, file1)[xml_location_key])
        self.assertIn('jira-webdriver-tests', fileutils.verify_filter_file(template, file2)[xml_location_key])
        self.assertIn('jira-func-tests', fileutils.verify_filter_file(template, file3)[xml_location_key])
    def test_save(self):
        """save() writes an empty surefire XML report for the suite."""
        fs = MockFileUtils()

        expected_content = [
            '<?xml version="1.0" encoding="UTF-8"?>',
            '<testsuite failures="0" time="0" errors="0" skipped="0" tests="0" name="suiteName">',
            '<properties/>',
            '</testsuite>'
        ]

        expected_file_name = os.sep.join([SurefireSuiteLogger.target_dir, "TEST-suiteName.xml"])
        fs.expect_write_lines(expected_file_name, expected_content, toReturn = None)
        logger = SurefireSuiteLogger('suiteName', fs)
        logger.save()
        self.assertTrue(fs.verify_write_lines(expected_file_name, expected_content))
Example #9
0
 def test_horde_skeleton_is_not_copied_when_directory_exists(self):
     """Skeleton is copied into horde home, but the home dir is never removed."""
     file_utils = MockFileUtils()
     # Canned check_output so the runner's Maven/Java version probe succeeds.
     process_utils = Mock().default_check_output(
         'Maven:3.3.2\nJava home:jdk_home\n:Java version 1.2.3_123'.encode(
         ))
     horde_runner = MockHordeRunner(self.args, file_utils, process_utils,
                                    self.horde_status_checker.not_running())
     horde_runner(Mock())
     self.assertTrue(
         file_utils.verify_copy_tree(
             self.horde_layout.horde_skeleton_dir(),
             self.horde_layout.horde_home_dir(False)))
     self.assertFalse(
         file_utils.verify_remove_dir(
             self.horde_layout.horde_home_dir(False)))
    def test_check_values_rising(self):
        """check_values must return do_not_proceed for a checked RISING
        metric whose value changed from the committed baseline 20 to 10."""
        processor = MetricsProcessor()

        args = Mock(non_interactive=True)
        git = Mock()

        # Committed baseline stats note: key1 = 20.
        git.expect_latest_annotated_commit_with_details(
            STATS_REF_NAME,
            toReturn={
                'note':
                '{"metrics":{"key1": {"value": 20, "description": ""}}}',
                'hash': 'abcdef',
                'commiter': 'me'
            })
        # No exclusions recorded since the baseline commit.
        git.expect_generate_annotated_commits_with_details(
            STATS_EXCLUSION_REF_NAME, commit_range='abcdef..HEAD', toReturn=[])

        metrics = DataBean()
        metrics.metrics = {
            'key1':
            DataBean(value=10, description='', checked=True, direction=RISING)
        }

        self.assertEqual(
            Callable.do_not_proceed,
            processor.check_values(args, metrics, git,
                                   MockFileUtils())(Mock()))
Example #11
0
 def __init__(self,
              metrics_name: str = 'deprecation.methods',
              description: str = 'Deprecated methods usage'):
     """Test double: records maven runs instead of executing them.

     :param metrics_name: metric key this collector reports under.
     :param description: human-readable metric description.
     """
     #noinspection PyTypeChecker
     super().__init__(metrics_name, description, MockFileUtils())
     # Arguments of each simulated maven invocation, in call order.
     self.maven_runs = []
     # Return code the fake maven run will report.
     self.return_code = 0
    def test_check_values_with_unchecked(self):
        """Metrics flagged checked=False must not block: result is success."""
        processor = MetricsProcessor()

        args = Mock(non_interactive=True)
        git = Mock()

        # Committed baseline stats note: key1 = 10, key2 = 10.
        git.expect_latest_annotated_commit_with_details(
            STATS_REF_NAME,
            toReturn={
                'note':
                '{"metrics":{"key1": {"value": 10, "description": ""}, '
                '"key2": {"value": 10, "description": ""}}}',
                'hash': '123456',
                'commiter': 'me'
            })
        git.expect_generate_annotated_commits_with_details(
            STATS_EXCLUSION_REF_NAME, commit_range='123456..HEAD', toReturn=[])

        metrics = DataBean()
        # NOTE(review): this first assignment is immediately overwritten by
        # the one below and has no effect — possibly 'key2' was intended.
        metrics.metrics = {
            'key1':
            DataBean(value=12,
                     description='',
                     checked=False,
                     direction=FALLING)
        }
        metrics.metrics = {
            'key1':
            DataBean(value=8, description='', checked=False, direction=RISING)
        }

        self.assertEqual(
            Callable.success,
            processor.check_values(args, metrics, git,
                                   MockFileUtils())(Mock()))
    def test_check_values_respects_other_violations(self):
        """An exclusion for key1 must not mask key2's violation:
        result is still do_not_proceed."""
        processor = MetricsProcessor()

        args = Mock(non_interactive=True)
        git = Mock()

        # Committed baseline stats note: key1 = 10, key2 = 10.
        git.expect_latest_annotated_commit_with_details(
            STATS_REF_NAME,
            toReturn={
                'note': '{"metrics":{"key1": {"value": 10, "description": ""},'
                '"key2": {"value": 10, "description": ""}}}',
                'hash': '123456',
                'commiter': 'me'
            })
        # Recorded exclusion covers key1 only (up to 15).
        exclusion = {
            'note':
            '{"committer": "testbot", "reason": "none", "exclusion": {"key1": 15}}'
        }
        git.expect_generate_annotated_commits_with_details(
            STATS_EXCLUSION_REF_NAME,
            commit_range='123456..HEAD',
            toReturn=[exclusion])

        metrics = DataBean()
        # key1 improved (8 < 10); key2 worsened (12 > 10, direction FALLING).
        metrics.metrics = {
            'key1':
            DataBean(value=8, description='', checked=True, direction=FALLING),
            'key2':
            DataBean(value=12, description='', checked=True, direction=FALLING)
        }

        self.assertEqual(
            Callable.do_not_proceed,
            processor.check_values(args, metrics, git,
                                   MockFileUtils())(Mock()))
    def test_check_values_generates_tests_report_when_no_previous_metrics(
            self):
        """Without prior metrics every key is logged as a success in the
        ehMetrics suite report, and the report is saved."""
        test_suite_logger = Mock()
        test_suite_logger_factory_mock = Mock().ordered_new_logger(
            "ehMetrics", toReturn=test_suite_logger)

        # Both metrics must be reported as passing, then the report saved,
        # in exactly this order (verified below).
        test_suite_logger.ordered_success('key2',
                                          0,
                                          'EHMetrics',
                                          toReturn=None)
        test_suite_logger.ordered_success('key3',
                                          0,
                                          'EHMetrics',
                                          toReturn=None)
        test_suite_logger.ordered_save(toReturn=None)

        processor = MetricsProcessor(test_suite_logger_factory_mock)

        metrics = DataBean(
            metrics={
                'key2':
                DataBean(
                    value=10, description='', checked=True, direction=FALLING),
                'key3':
                DataBean(
                    value=10, description='', checked=True, direction=FALLING)
            })

        args = Mock(non_interactive=True)
        self.assertEqual(
            Callable.success,
            processor.check_values(args, metrics, Mock(),
                                   MockFileUtils())(Mock()))

        test_suite_logger.verify_all_ordered()
Example #15
0
    def test_simple_startup_without_options(self):
        """Default startup: tomcat 'run' command, standard JVM opts and
        CATALINA_* env vars; no jrebel/debug agents."""
        #having
        tomcat_starter = MockTomcatStarter(self.args, False, MockFileUtils())
        #when
        tomcat_starter(Mock())
        #then

        #noinspection PyUnresolvedReferences
        self.assertEqual(tomcat_starter._SystemCallable__command,
                         self.layout.tomcat_executable() + ' run')
        self.assertTrue(tomcat_starter.executed,
                        'Process should have been executed')
        opts = self.__get_opts_dict(tomcat_starter)
        self.__assertParameter(opts, '-Djira.home', self.layout.jira_home())
        self.__assertParameter(opts, '-Djira.plugins.bundled.disable', 'false')
        self.__assertParameter(
            opts, '-Djira.dev.bundledplugins.url',
            'file://' + BundledPluginsUtility.BUNDLED_PLUGINS_LIST)
        self.__assertParameter(opts, '-Xms128m')
        self.__assertParameter(opts, '-Xmx1024m')
        self.__assertParameter(opts, '-XX:MaxPermSize', '384m')
        # No hot-reload or remote-debug agents by default.
        self.assertNotIn('-Djira.rebel.root', opts)
        self.assertNotIn('-javaagent:', opts)
        self.assertNotIn('-agentlib:jdwp:', opts)
        #now check tomcat environment variables
        self.__assertParameter(tomcat_starter.env, 'CATALINA_HOME',
                               self.layout.tomcat_dir(False))
        self.__assertParameter(tomcat_starter.env, 'CATALINA_TMPDIR',
                               self.layout.tomcat_temp_dir())
        self.__assertParameter(tomcat_starter.env, 'CATALINA_BASE',
                               self.layout.tomcat_work_dir())
Example #16
0
 def test_horde_is_not_run_when_one_is_already_running(self):
     """A second horde instance must not start while one is running."""
     # The status checker reports an instance is already up.
     runner = MockHordeRunner(self.args, MockFileUtils(), Mock(),
                              self.horde_status_checker.running())
     runner(Mock())
     self.assertFalse(
         runner.executed,
         "Parent class should not be executed when another instance is already running"
     )
Example #17
0
    def test_save(self):
        """save() writes an empty surefire XML report for the suite."""
        fs = MockFileUtils()

        expected_content = [
            '<?xml version="1.0" encoding="UTF-8"?>',
            '<testsuite failures="0" time="0" errors="0" skipped="0" tests="0" name="suiteName">',
            '<properties/>', '</testsuite>'
        ]

        expected_file_name = os.sep.join(
            [SurefireSuiteLogger.target_dir, "TEST-suiteName.xml"])
        fs.expect_write_lines(expected_file_name,
                              expected_content,
                              toReturn=None)
        logger = SurefireSuiteLogger('suiteName', fs)
        logger.save()
        self.assertTrue(
            fs.verify_write_lines(expected_file_name, expected_content))
Example #18
0
 def test_bundled_plugins_removes_jira_dev_bundledplugins_url(self):
     """bundled_plugins mode must drop the dev bundled-plugins URL opt."""
     self.args.bundled_plugins = True
     #having
     tomcat_starter = MockTomcatStarter(self.args, False, MockFileUtils())
     #when
     tomcat_starter(Mock())
     #then
     opts = self.__get_opts_dict(tomcat_starter)
     self.assertNotIn('-Djira.dev.bundledplugins.url', opts)
    def test_jrebel_is_discovered_and_java_opts_are_set(self):
        """When jrebel.jar is found in an IDEA plugin dir, the javaagent and
        rebel.root opts are added (and no jdwp agent)."""
        #having
        self.args.jrebel = True
        file_utils = MockFileUtils()
        # Several candidate IDEA dirs; only 'idea13' contains the jar.
        file_utils.expect_possible_idea_plugin_dirs(toReturn=['idea12', 'idea13', 'idea129'])
        jrebel_path = os.path.join('idea13', 'config', 'plugins', 'jr-ide-idea', 'lib', 'jrebel', 'jrebel.jar')
        file_utils.expect_file_exists(jrebel_path, toReturn=True)
        tomcat_starter = MockTomcatStarter(self.args, False, file_utils)

        #when
        return_code = tomcat_starter(Mock())

        #then
        self.assertEqual(return_code, Callable.success, 'Expected successful return code')
        opts = self.__get_opts_dict(tomcat_starter)
        self.__assertParameter(opts, '-javaagent:"' + jrebel_path + '"')
        self.__assertParameter(opts, '-Djira.rebel.root', '"' + PathUtils.abspath('.') + '"')
        self.assertNotIn('-agentlib:jdwp:', opts)
Example #20
0
    def test_setup_home_dir_has_to_unzip_and_filter_file(self):
        """When jira home does not exist yet, setup must filter (and remove)
        both the db-config and db-script templates."""
        file_utils = MockFileUtils()
        jira_home = 'some-home'
        # Home dir is absent, forcing the full setup path.
        file_utils.expect_file_exists(jira_home, toReturn=False)

        args = Mock(layout=Mock().expect_jira_home(toReturn=jira_home))
        setup_jira_home = SetupJiraHomeHsqldb(args, file_utils)

        return_code = setup_jira_home(Mock())
        self.assertEqual(return_code, Callable.success, 'Invalid return code')

        self.verify_template_is_filtered_and_removed(file_utils,
                                                     os.sep.join([jira_home, SetupJiraHomeHsqldb.DB_CONFIG]),
                                                     os.sep.join([jira_home, SetupJiraHomeHsqldb.DB_CONFIG_TEMPLATE]))
        self.verify_template_is_filtered_and_removed(file_utils,
                                                     os.sep.join([jira_home, SetupJiraHomeHsqldb.DB_DIRECTORY,
                                                                  SetupJiraHomeHsqldb.DB_SCRIPT]),
                                                     os.sep.join([jira_home, SetupJiraHomeHsqldb.DB_DIRECTORY,
                                                                  SetupJiraHomeHsqldb.DB_SCRIPT_TEMPLATE]))
Example #21
0
    def setUp(self):
        """Wire ManifestoUtils to a mocked URL reader and mocked filesystem."""
        self.fs = MockFileUtils()
        self.url = Mock()
        self.manifesto = ManifestoUtils(url_utils=self.url, fs=self.fs)

        # Canned manifesto response for the jirastudio-dev environment lookup.
        self.url.expect_read(
            'https://manifesto.uc-inf.net/api/env/jirastudio-dev',
            None, None,
            toReturn='{ "hash": "123456" }')
Example #22
0
    def test_installing_jmake_module(self):
        """jmake IDEA module: created when absent, kept when present, skipped
        when the iml exists, and re-created under force."""

        workspace_modules_file = os.sep.join(['.', '.idea', 'modules.xml'])
        element = XML.Element('project', {'version':'4'})
        self.xml.expect_parse(workspace_modules_file,  element)

        # test that it is created:
        self.fileutils = MockFileUtils()
        process_jmake_module(Mock(force=False), self.fileutils, self.xml)(self.logger)
        self.assertEqual(self.fileutils.callcount_copy_file(), 1)
        module_list = element[0][0]
        self.assertEqual(len(module_list), 1)
        module = module_list[0]
        self.assertEqual(module.tag, 'module')
        self.assertIn('filepath', module.attrib)
        self.assertIn('fileurl', module.attrib)

        # test that it is not overridden:
        self.fileutils = MockFileUtils()
        # Marker attribute to detect replacement of the existing entry.
        module.attrib['angry'] = 'nerdz'
        process_jmake_module(Mock(force=False), self.fileutils, self.xml)(self.logger)
        self.assertEqual(self.fileutils.callcount_copy_file(), 1)
        module = module_list[0]
        self.assertEqual(module.tag, 'module')
        self.assertIn('filepath', module.attrib)
        self.assertIn('fileurl', module.attrib)
        self.assertIn('angry', module.attrib)
        self.assertEqual(module.attrib['angry'], 'nerdz')

        # test, that it will not be created when iml file exists
        module_list.remove(module)
        self.fileutils = MockFileUtils()
        self.fileutils.expect_file_exists(os.sep.join(['.', 'jmake_src', 'jmake_src.iml']), toReturn=True)
        process_jmake_module(Mock(force=False), self.fileutils, self.xml)(self.logger)
        self.assertEqual(self.fileutils.callcount_copy_file(), 0)
        self.assertEqual(len(module_list), 0)

        # force should override that
        process_jmake_module(Mock(force=True), self.fileutils, self.xml)(self.logger)
        self.assertEqual(self.fileutils.callcount_copy_file(), 1)
        self.assertEqual(len(module_list), 1)
Example #23
0
    def test_validate_workspace_params_happy_path(self):
        """Validation succeeds when cwd is jira-project inside a workspace
        whose root contains a pom.xml."""
        fs = MockFileUtils()

        workspace = '/home/abracadabra/IdeaProjects/rest-workspace'
        fs.default_getcwd(os.sep.join([workspace, 'jira-project']))
        fs.expect_file_exists(os.sep.join([workspace, 'pom.xml']),
                              toReturn=True)
        fs.expect_get_parent_dir_path(toReturn=workspace)

        status = WorkspaceValidator(fs)(Mock())

        self.assertEqual(status, Callable.success)
Example #24
0
    def test_debug_parameter_should_add_agentlib_jdwp_param(self):
        """Debug mode adds the jdwp agent on the configured port and no
        jrebel javaagent."""
        # given
        starter = MockTomcatStarter(self.args, True, MockFileUtils())
        self.args.debug_port = 8546
        # when
        starter(Mock())

        # then
        opts = self.__get_opts_dict(starter)
        self.__assertParameter(
            opts, '-agentlib:jdwp',
            'transport=dt_socket,server=y,suspend=n,address=8546')
        self.assertNotIn('-javaagent:', opts)
Example #25
0
    def test_validate_workspace_params_fails_without_workspace(self):
        """Validation fails (and logs an error) when the workspace root has
        no pom.xml."""
        fileutils = MockFileUtils()

        workspace_dir = '/home/abracadabra/IdeaProjects/rest-workspace'
        jira_project_dir = 'jira-project'
        pom_xml = 'pom.xml'

        fileutils.expect_getcwd(toReturn=os.sep.join([workspace_dir, jira_project_dir]))
        # Missing pom.xml is what makes validation fail.
        fileutils.expect_file_exists(os.sep.join([workspace_dir, pom_xml]), toReturn=False)
        fileutils.expect_get_parent_dir_path(toReturn=workspace_dir)
        logger = Mock()

        callable_status = WorkspaceValidator(fileutils)(logger)

        self.assertEqual(logger.callcount_error(), 1)
        self.assertEqual(callable_status, Callable.failure)
    def test_check_values_generates_tests_report_with_previous_metrics(self):
        """With a baseline, the report logs key2 as failed (9 -> 10, FALLING)
        and key3 as success, then saves; check_values blocks proceeding."""
        test_suite_logger = Mock()
        test_suite_logger_factory_mock = Mock().ordered_new_logger(
            "ehMetrics", toReturn=test_suite_logger)

        # Expected ordered report calls: failure for key2, success for key3,
        # then a single save (verified at the bottom).
        test_suite_logger.ordered_failed(
            'key2',
            0,
            'EHMetrics',
            'metric key2 () increased from 9 to 10.',
            toReturn=None)
        test_suite_logger.ordered_success('key3',
                                          0,
                                          'EHMetrics',
                                          toReturn=None)
        test_suite_logger.ordered_save(toReturn=None)

        processor = MetricsProcessor(test_suite_logger_factory_mock)

        metrics = DataBean(
            metrics={
                'key2':
                DataBean(
                    value=10, description='', checked=True, direction=FALLING),
                'key3':
                DataBean(
                    value=10, description='', checked=True, direction=FALLING)
            })

        git = Mock()
        # Baseline note: key1 = 10, key2 = 9 (so key2 at 10 has worsened).
        git.ordered_latest_annotated_commit_with_details(
            STATS_REF_NAME,
            toReturn={
                'note':
                '{"metrics":{"key1": {"value": 10, "description": ""}, "key2": {"value": 9, "description": ""}}}',
                'hash': '123456',
                'commiter': 'me'
            })
        git.ordered_generate_annotated_commits_with_details(
            STATS_EXCLUSION_REF_NAME, commit_range='123456..HEAD', toReturn=[])

        args = Mock(non_interactive=True)
        self.assertEqual(
            Callable.do_not_proceed,
            processor.check_values(args, metrics, git,
                                   MockFileUtils())(Mock()))

        git.verify_all_ordered()
        test_suite_logger.verify_all_ordered()
Example #27
0
    def test_execution_email_enabled(self):
        """enable_mail must turn off both mail-send and mail-fetch disabling."""

        self.args.enable_mail = True
        #having
        tomcat_starter = MockTomcatStarter(self.args, False, MockFileUtils())
        #when
        tomcat_starter(Mock())
        #then

        self.assertTrue(tomcat_starter.executed,
                        'Process should have been executed')
        opts = self.__get_opts_dict(tomcat_starter)
        self.__assertParameter(opts, '-Djira.home', self.layout.jira_home())
        self.__assertParameter(opts, '-Datlassian.mail.senddisabled', 'false')
        self.__assertParameter(opts, '-Datlassian.mail.fetchdisabled', 'false')
Example #28
0
    def test_file_and_size(self):
        """FileCountAndSize must tally matching files and their cumulative
        size via file_utils, and must never ask to open files itself."""

        # Use a def instead of a name-bound lambda (PEP 8) and avoid
        # shadowing the builtin `filter`.
        def name_filter(x):
            return x.startswith('t')

        m = MockFileUtils()
        for word, size in TestMetrics.numbers:
            m.expect_file_size(word, toReturn=size)

        obj = FileCountAndSize('test', name_filter, file_utils=m, metrics_logger=Mock()).configure(Mock(), False)

        obj.pre_files_scan('test-module')
        for word, size in TestMetrics.numbers:
            self.assertFalse(obj.wants_file(word), 'File And Size Count metrics cannot ask to open files.')
        obj.post_files_scan('test-module')

        # Compute expected totals independently of the object under test
        # (renamed to avoid shadowing the builtin `bytes`).
        expected_files = 0
        expected_bytes = 0
        for word, size in TestMetrics.numbers:
            if name_filter(word):
                expected_bytes += size
                expected_files += 1

        self.assertEqual(expected_files, obj.get_values()['test.count'].value)
        self.assertEqual(expected_bytes, obj.get_values()['test.cumulativesize'].value)
Example #29
0
    def test_setup_home_dir_when_already_exists(self):
        """Setup succeeds and only checks existence when jira home is present."""
        file_utils = MockFileUtils()
        # Ordered expectation: existence check is the only call verified.
        file_utils.ordered_file_exists('some-home', toReturn=True)
        args = MocArgs(setup_home=True)
        args.layout.expect_jira_home(toReturn='some-home')
        setup_jira_home = SetupJiraHomeHsqldb(args, file_utils)

        return_code = setup_jira_home(Mock())
        self.assertEqual(return_code, Callable.success, 'Invalid return code')
        file_utils.verify_all_ordered()
Example #30
0
    def test_maven_download_executed_when_tomcat_does_not_exist(self):
        """With no tomcat dir, the maven download runs with the expected
        project, profiles, phase and tomcat.dir property."""
        #having
        mock_file_utils = MockFileUtils()
        tomcat_downloader = MockTomcatDownloader(self.args,
                                                 file_utils=mock_file_utils)

        #when
        tomcat_downloader(Mock())

        #then
        self.assertTrue(tomcat_downloader.executed,
                        'Downloading task was expected to execute')
        self.assertListEqual(tomcat_downloader.projects, ['jira-ide-support'])
        self.assertListEqual(tomcat_downloader.profiles,
                             ['download-tomcat', 'ide-setup'])
        self.assertListEqual(tomcat_downloader.phases, ['initialize'])
        self.assertDictEqual(tomcat_downloader.properties,
                             {'tomcat.dir': self.layout.tomcat_download_dir()})
Example #31
0
    def _test_diff_metrics_impl(self, metrics_to_compare, files_now, files_before, expected_result, file_contents):
        """Drive MetricsDiff.diff_metrics over mocked 'now'/'before' dirs and
        assert the produced diff equals expected_result.

        :param file_contents: dict mapping file name to a
            {'before': lines|None, 'now': lines|None} dict.
        """
        file_utils = MockFileUtils()
        # Lists instead of one-shot `map` iterators: a map object would be
        # exhausted after a single iteration by the code under test.
        files_now = [MetricsDiff.HIT_LOG_FILE_NAME_PATTERN % f for f in files_now]
        files_before = [MetricsDiff.HIT_LOG_FILE_NAME_PATTERN % f for f in files_before]
        file_utils.expect_listdir('now', toReturn=files_now)
        file_utils.expect_listdir('before', toReturn=files_before)

        # FIX: loop variable used to shadow the `file_contents` parameter;
        # a distinct name keeps the parameter intact.
        for file_name, contents_by_when in file_contents.items():
            for when in ['before', 'now']:
                if contents_by_when[when] is not None:
                    file_utils.expect_read_lines(
                        os.sep.join([when, MetricsDiff.HIT_LOG_FILE_NAME_PATTERN % file_name]),
                        toReturn=contents_by_when[when])

        log = Mock()
        #noinspection PyTypeChecker
        md = MetricsDiff(file_utils)
        diffs = list(md.diff_metrics(log, 'now', 'before', metrics_to_compare))
        self.assertEqual(expected_result, diffs)
Example #32
0
    def test_execution_is_suppressed_when_jrebel_not_discovered(self):
        """With jrebel requested but no IDEA plugin dirs, startup must be
        suppressed and a jrebel-related error logged."""
        #having
        self.args.jrebel = True
        tomcat_starter = MockTomcatStarter(
            self.args, False,
            MockFileUtils().expect_possible_idea_plugin_dirs(toReturn=[]))
        log = Mock()
        #when
        return_code = tomcat_starter(log)

        #then
        self.assertEqual(return_code, Callable.do_not_proceed,
                         'Expected successful return code')

        self.assertTrue(log.callcount_error() > 0)
        # verify, that jrebel related error message was logged:
        self.assertTrue(
            reduce(lambda r, e: r or 'jrebel' in e, log.error.made_calls,
                   False))
class TestLegacyOdSetup(TestCase):
    """The legacy OD svn symlink is created only when it does not exist."""

    def setUp(self):
        # Layout mock resolves the svn link and svn dir locations.
        self.legacyOdSetup = LegacyOdSvnSetup(Mock(layout=Mock().expect_studio_svn_link(toReturn='/tmp/link')
                                                   .expect_studio_svn_dir(toReturn='/tmp/svn')))
        self.fs = MockFileUtils()

    def testLinkIsNotCreatedWhenExists(self):
        """No symlink call when the link path already exists."""
        self.fs.expect_file_exists('/tmp/link', toReturn=True)
        self.legacyOdSetup(Mock(), file_utils=self.fs)
        self.assertEqual(self.fs.callcount_symlink(), 0)

    def testLinkIsCreatedWhenNotExists(self):
        """Symlink from /tmp/svn to /tmp/link is created when missing."""
        self.fs.expect_file_exists('/tmp/link', toReturn=False)
        self.legacyOdSetup(Mock(), file_utils=self.fs)
        self.assertTrue(self.fs.verify_symlink('/tmp/svn', '/tmp/link'))
Example #34
0
    def setUp(self):
        """Build an EhMetrics with mocked git, json, fs and a metrics
        processor whose stages record that they were invoked."""
        super().setUp()
        self.git = Mock()
        self.git.expect_get_remotes(toReturn=['stash!'])

        self.json = Mock()
        self.json.default_as_str(SERIALIZED_METRICS)

        self.fs = MockFileUtils()
        self.executor = []
        self.metrics_processor = Mock()

        # Flags flipped by the stub stages so tests can assert which
        # processor stages actually ran.
        self.called_process_metrics = False

        def call_process_metrics(_):
            self.called_process_metrics = True
            return Callable.success

        self.metrics_processor.default_process_metrics(call_process_metrics)

        self.called_generate_report = False

        def call_generate_report(_):
            self.called_generate_report = True
            return Callable.success

        self.metrics_processor.default_generate_report(call_generate_report)

        self.called_check_values = False

        def call_check_values(_):
            self.called_check_values = True
            return Callable.success

        self.metrics_processor.default_check_values(call_check_values)

        self.ehmetrics = EhMetrics(git=self.git, fs=self.fs,
                                   metrics_processor=self.metrics_processor, json_writer=self.json)

        # Index into self.executor used by tests when replaying tasks.
        self.executor_cursor = 0
Example #35
0
    def _test_diff_metrics_impl(self, metrics_to_compare, files_now,
                                files_before, expected_result, file_contents):
        """Drive MetricsDiff.diff_metrics over mocked 'now'/'before' dirs and
        assert the produced diff equals expected_result.

        :param file_contents: dict mapping file name to a
            {'before': lines|None, 'now': lines|None} dict.
        """
        file_utils = MockFileUtils()
        # Lists instead of one-shot `map` iterators: a map object would be
        # exhausted after a single iteration by the code under test.
        files_now = [MetricsDiff.HIT_LOG_FILE_NAME_PATTERN % f
                     for f in files_now]
        files_before = [MetricsDiff.HIT_LOG_FILE_NAME_PATTERN % f
                        for f in files_before]
        file_utils.expect_listdir('now', toReturn=files_now)
        file_utils.expect_listdir('before', toReturn=files_before)

        # FIX: loop variable used to shadow the `file_contents` parameter;
        # a distinct name keeps the parameter intact.
        for file_name, contents_by_when in file_contents.items():
            for when in ['before', 'now']:
                if contents_by_when[when] is not None:
                    file_utils.expect_read_lines(
                        os.sep.join([
                            when,
                            MetricsDiff.HIT_LOG_FILE_NAME_PATTERN % file_name
                        ]),
                        toReturn=contents_by_when[when])

        log = Mock()
        #noinspection PyTypeChecker
        md = MetricsDiff(file_utils)
        diffs = list(md.diff_metrics(log, 'now', 'before', metrics_to_compare))
        self.assertEqual(expected_result, diffs)
Example #36
0
    def test_installing_jmake_module(self):
        """jmake IDEA module: created when absent, kept when present, skipped
        when the iml exists, and re-created under force."""

        workspace_modules_file = os.sep.join(['.', '.idea', 'modules.xml'])
        element = XML.Element('project', {'version': '4'})
        self.xml.expect_parse(workspace_modules_file, element)

        # test that it is created:
        self.fileutils = MockFileUtils()
        process_jmake_module(Mock(force=False), self.fileutils,
                             self.xml)(self.logger)
        self.assertEqual(self.fileutils.callcount_copy_file(), 1)
        module_list = element[0][0]
        self.assertEqual(len(module_list), 1)
        module = module_list[0]
        self.assertEqual(module.tag, 'module')
        self.assertIn('filepath', module.attrib)
        self.assertIn('fileurl', module.attrib)

        # test that it is not overridden:
        self.fileutils = MockFileUtils()
        # Marker attribute to detect replacement of the existing entry.
        module.attrib['angry'] = 'nerdz'
        process_jmake_module(Mock(force=False), self.fileutils,
                             self.xml)(self.logger)
        self.assertEqual(self.fileutils.callcount_copy_file(), 1)
        module = module_list[0]
        self.assertEqual(module.tag, 'module')
        self.assertIn('filepath', module.attrib)
        self.assertIn('fileurl', module.attrib)
        self.assertIn('angry', module.attrib)
        self.assertEqual(module.attrib['angry'], 'nerdz')

        # test, that it will not be created when iml file exists
        module_list.remove(module)
        self.fileutils = MockFileUtils()
        self.fileutils.expect_file_exists(os.sep.join(
            ['.', 'jmake_src', 'jmake_src.iml']),
                                          toReturn=True)
        process_jmake_module(Mock(force=False), self.fileutils,
                             self.xml)(self.logger)
        self.assertEqual(self.fileutils.callcount_copy_file(), 0)
        self.assertEqual(len(module_list), 0)

        # force should override that
        process_jmake_module(Mock(force=True), self.fileutils,
                             self.xml)(self.logger)
        self.assertEqual(self.fileutils.callcount_copy_file(), 1)
        self.assertEqual(len(module_list), 1)
Example #37
0
class TestLegacyOdSetup(TestCase):
    """Tests for LegacyOdSvnSetup: the studio svn symlink is created only
    when it does not already exist."""

    def setUp(self):
        # layout mock exposes the link path and the svn dir it should point at
        layout = Mock().expect_studio_svn_link(toReturn='/tmp/link').expect_studio_svn_dir(toReturn='/tmp/svn')
        self.legacyOdSetup = LegacyOdSvnSetup(Mock(layout=layout))
        self.fs = MockFileUtils()

    def testLinkIsNotCreatedWhenExists(self):
        """An already-existing link must not be recreated."""
        self.fs.expect_file_exists('/tmp/link', toReturn=True)
        self.legacyOdSetup(Mock(), file_utils=self.fs)
        self.assertEqual(self.fs.callcount_symlink(), 0)

    def testLinkIsCreatedWhenNotExists(self):
        """A missing link must be created, pointing at the svn dir."""
        self.fs.expect_file_exists('/tmp/link', toReturn=False)
        self.legacyOdSetup(Mock(), file_utils=self.fs)
        self.assertTrue(self.fs.verify_symlink('/tmp/svn', '/tmp/link'))
# Example #38
# 0
 def test_ondemand_option_adds_properties(self):
     """When the layout is OnDemand, the tomcat starter must add the
     studio and crowd system properties, and must not add the bundled
     plugins url."""
     # having
     self.args.layout.ondemand = True
     self.args.horde_layout = HordeLayout(self.layout.jira_home())
     tomcat_starter = MockTomcatStarter(self.args, True, MockFileUtils())
     # when
     tomcat_starter(Mock())
     # then
     opts = self.__get_opts_dict(tomcat_starter)
     expected_properties = [
         ('-Dstudio.initial.data.xml', self.layout.studio_initial_data()),
         ('-Dstudio.home', self.layout.jira_home()),
         ('-Datlassian.darkfeature.com.atlassian.jira.config.CoreFeatures.ON_DEMAND', 'true'),
         ('-Dcrowd.property.application.login.url', self.args.horde_layout.horde_application_login_url()),
         ('-Dcrowd.property.crowd.server.url', self.args.horde_layout.horde_server_url()),
         ('-Dstudio.webdav.directory', self.layout.webdav_dir()),
     ]
     for key, value in expected_properties:
         self.__assertParameter(opts, key, value)
     self.assertNotIn('-Djira.dev.bundledplugins.url', opts)
# Example #39
# 0
    def test_jrebel_is_discovered_and_java_opts_are_set(self):
        """With args.jrebel on, the starter must find the jrebel agent jar
        in one of the candidate idea plugin dirs and add the -javaagent
        and rebel-root options; no debug agent may be added."""
        # having
        self.args.jrebel = True
        fs = MockFileUtils()
        fs.expect_possible_idea_plugin_dirs(toReturn=['idea12', 'idea13', 'idea129'])
        # only the idea13 candidate actually contains the agent jar:
        agent_jar = os.path.join('idea13', 'config', 'plugins', 'jr-ide-idea',
                                 'lib', 'jrebel', 'jrebel.jar')
        fs.expect_file_exists(agent_jar, toReturn=True)
        tomcat_starter = MockTomcatStarter(self.args, False, fs)

        # when
        return_code = tomcat_starter(Mock())

        # then
        self.assertEqual(return_code, Callable.success,
                         'Expected successful return code')
        opts = self.__get_opts_dict(tomcat_starter)
        self.__assertParameter(opts, '-javaagent:"' + agent_jar + '"')
        self.__assertParameter(opts, '-Djira.rebel.root',
                               '"' + PathUtils.abspath('.') + '"')
        self.assertNotIn('-agentlib:jdwp:', opts)
# Example #40
# 0
    def test_transform_maven_executable_for_workspace(self):
        """MavenCallableWorkspaceDelegator.after_init, run from a workspace
        checkout, must discover sibling workspace modules by their pom.xml
        files and copy the versions parsed from those poms onto the maven
        callable as properties."""
        fileutils = MockFileUtils()

        workspace_dir = '/home/abracadabra/IdeaProjects/rest-workspace'
        jira_project_dir = 'jira-project'
        jira_rest_dir = 'jira-rest'
        pom_xml = 'pom.xml'

        fileutils.expect_get_parent_dir_relpath(workspace_dir, toReturn=jira_project_dir)

        # cwd is inside the jira-project module of the workspace:
        fileutils.default_getcwd(os.sep.join([workspace_dir, jira_project_dir]))
        # the workspace contains two module dirs plus non-module noise:
        fileutils.expect_listdir(workspace_dir,
                                 toReturn=[jira_project_dir, jira_rest_dir, '.idea', pom_xml, 'README.txt'])

        fileutils.expect_dir_exists(os.sep.join([workspace_dir, jira_project_dir]), toReturn=True)
        fileutils.expect_dir_exists(os.sep.join([workspace_dir, jira_rest_dir]), toReturn=True)
        fileutils.expect_dir_exists(os.sep.join([workspace_dir, '.idea']), toReturn=True)

        fileutils.expect_file_exists(os.sep.join([workspace_dir, jira_project_dir, pom_xml]), toReturn=True)
        fileutils.expect_file_exists(os.sep.join([workspace_dir, jira_rest_dir, pom_xml]), toReturn=True)

        # one pom parser mock per module pom, each reporting its artifact id and version:
        parser_mocks = {os.sep.join([workspace_dir, jira_project_dir, pom_xml]):
                            Mock().expect_get_artifact_id(toReturn=jira_project_dir).expect_get_version(toReturn='10.0.0-SNAPSHOT'),
                        os.sep.join([workspace_dir, jira_rest_dir, pom_xml]):
                            Mock().expect_get_artifact_id(toReturn=jira_rest_dir).expect_get_version(toReturn='9.1.2-SNAPSHOT')}

        PROJECT1 = os.sep.join(['jira-components', 'jira-webapp'])
        PROJECT2 = os.sep.join(['jira-components', 'jira-plugins', 'jira-bundled-plugins'])
        maven_mock = Mock(projects = [PROJECT1, PROJECT2])

        MavenCallableWorkspaceDelegator.after_init(maven_mock, workspace_dir, fileutils,
                                                   pom_parser_class_object=lambda path: parser_mocks[path])

        # the versions parsed from the poms must end up as maven properties:
        for k, v in {'jira.version': '10.0.0-SNAPSHOT',
                     'jira.project.version': '10.0.0-SNAPSHOT',
                     'jira.rest.version': '9.1.2-SNAPSHOT'}.items():
            self.assertTrue(maven_mock.verify_property(k, v))
# Example #41
# 0
 def setUp(self):
     """Silence logging and provide fresh file/xml mocks for each test."""
     self.logger = Logger().set_none()
     self.fileutils = MockFileUtils()
     self.xml = MockXmlUtils()
    def test_generate_report(self):
        """generate_report must merge the metrics recorded in git notes on
        historic commits with the current metrics into a date-ordered
        points series, serialize it, and write it into the report's
        data.js file."""
        # two historic commits carry json metrics notes; the newest commit
        # ("jira-stats" head) has an empty note and represents "current":
        git = Mock().expect_generate_annotated_commits_with_details("jira-stats", toReturn=[
            {
                'hash': '12121212',
                'shorthash': '1212',
                'commiter': 'Mark <unit@tester>',
                'date': 1371114910,
                'note': '{"metrics": {"test.metric": 1}, "build_number": "BN-2"}'
            },
            {
                'hash': '58585858',
                'shorthash': '5858',
                'commiter': 'Frank <frank@localhost>',
                'date': 1371111910,
                'note': '{"metrics": {"test.metric": 3}, "build_number": "BN-1"}'
            },
        ]).ordered_get_commit_details("jira-stats", toReturn={
            'hash': '34343434',
            'shorthash': '3434',
            'commiter': 'Unit Tester <unit@tester>',
            'date': 1371114916,
            'note': ''
        })

        # expected serializer input: points oldest-first, with the current
        # metrics appended last under build_number 'current':
        json_writter = Mock().ordered_as_str({'points': [
            {
                'metrics': {
                    'test.metric': 3
                },
                'commits': [
                    {
                        'hash': '58585858',
                        'shorthash': '5858',
                        'commiter': 'Frank <frank@localhost>',
                        'date': 1371111910
                    }
                ],
                'date': 1371111910,
                'build_number': 'BN-1'
            },
            {
                'metrics': {
                    'test.metric': 1
                },
                'commits': [
                    {
                        'hash': '12121212',
                        'shorthash': '1212',
                        'commiter': 'Mark <unit@tester>',
                        'date': 1371114910
                    }
                ],
                'date': 1371114910,
                'build_number': 'BN-2'
            },
            {
                'metrics': {
                    'test.metric': 2
                },
                'commits': [
                    {
                        'date': 1371114916,
                        'commiter': 'Unit Tester <unit@tester>',
                        'shorthash': '3434',
                        'hash': '34343434'
                    }
                ],
                'date': 1371114916,
                'build_number': 'current'
            }
        ]}, toReturn='__JSON__')

        file_utils = MockFileUtils()
        log = Mock()

        current_metrics = DataBean(metrics={
            "test.metric": 2
        })

        processor = MetricsProcessor()
        generate_report_closure = processor.generate_report(current_metrics, file_utils, git, json_writter)
        generate_report_closure(log)
        # the serialized payload must be embedded in the report's data.js:
        self.assertTrue(file_utils.verify_write_lines(os.sep.join(['target','eh-metrics-report','js','data.js']),
            ['(function() { var data = __JSON__; executeReport(data); })();']))
# Example #43
# 0
class JmakeIdeaTest(TestCase):
    """Tests for the jmake idea helpers that generate and patch IntelliJ
    IDEA project files (modules.xml, workspace.xml, compiler.xml, run
    configurations) over mocked file and xml utilities."""

    def setUp(self):
        # silence logging; fresh mocks per test
        self.logger = Logger().set_none()
        self.fileutils = MockFileUtils()
        self.xml = MockXmlUtils()

    def test_targets_for_projects(self):
        """Modules that contain a pom.xml contribute their target dir
        (project3 has no target dir yet but still counts); project2 has
        no pom and is skipped."""
        structure = {'files': ['pom.xml'],
                     'target': {},
                     'project1': {'files': ['pom.xml'],
                                  'target': {}},
                     'project2': {'target': {}},
                     'project3': {'files': ['pom.xml']}}
        self.fileutils.expect_walk('.', structure)

        self.assertListEqual(sorted(get_targets_for(['.'], self.fileutils)), sorted([os.sep.join(['root', 'target']),
                                                                                os.sep.join(
                                                                                    ['root', 'project1', 'target']),
                                                                                os.sep.join(
                                                                                    ['root', 'project3', 'target'])]))

    def test_project_opened(self):
        """ensure_project_was_opened must ask to stop until the .idea
        directory exists."""
        self.assertEqual(Callable.do_not_proceed, ensure_project_was_opened(self.logger, self.fileutils))
        self.fileutils.expect_dir_exists('.%s.idea' % os.sep, toReturn=True)
        self.assertEqual(Callable.success, ensure_project_was_opened(self.logger, self.fileutils))


    def test_jmake_idea_clean(self):
        """process_clean removes exactly the known generated files/dirs
        that actually exist, and nothing else."""

        process_clean(self.logger, self.fileutils)
        # no files existed, so none were removed:
        self.assertEqual(self.fileutils.callcount_remove_dir(), 0)
        self.assertEqual(self.fileutils.callcount_remove(), 0)

        files = [ os.sep.join(['.', '.idea', 'runConfigurations', 'Tomcat_6.xml']),
                  os.sep.join(['.', '.idea', 'runConfigurations', 'JIRA_OnDemand.xml']),
                  os.sep.join(['.', '.idea', 'runConfigurations', 'Selenium_Tests.xml']),
                  os.sep.join(['.', '.idea', 'artifacts', 'JIRA.xml']),
                  os.sep.join(['jira-ide-support', 'src', 'main', 'resources', 'jira.idea.properties'])]

        dirs = [ os.sep.join(['.', 'tomcatBase']), os.sep.join(['.', 'classes']) ]
        for file in files: self.fileutils.expect_file_exists(file, toReturn=True)
        for dir in dirs: self.fileutils.expect_dir_exists(dir, toReturn=True)
        process_clean(self.logger, self.fileutils)

        # the existing files/dirs should be deleted, but only those.
        for file in files: self.assertTrue(self.fileutils.verify_remove(file))
        self.assertEqual(len(files), self.fileutils.callcount_remove())
        for dir in dirs: self.assertTrue(self.fileutils.verify_remove_dir(dir))
        self.assertEqual(len(dirs), self.fileutils.callcount_remove_dir())

    def test_javac_memory_setter(self):
        """The MAXIMUM_HEAP_SIZE compiler option is created at 512, raised
        to 512 when lower, left alone when higher, and reset to 512 with
        force."""

        file = os.sep.join(['.idea', 'compiler.xml'])
        element = XML.Element('project', {'version':'4'})

        self.xml.expect_parse(file, element)
        process_compiler_settings(Mock(force=False), self.xml)(self.logger)

        def expect_javac_settings(how_much):
            # asserts the parsed tree holds a single MAXIMUM_HEAP_SIZE
            # option with the given value
            self.assertEqual(len(element), 2)
            child = element[0]
            self.assertEqual(child.tag, 'component')
            self.assertEqual(len(child), 1)
            child = child[0]
            self.assertEqual(child.tag, 'option')
            self.assertIn('name', child.attrib)
            self.assertEqual(child.attrib['name'], 'MAXIMUM_HEAP_SIZE')
            self.assertIn('value', child.attrib)
            self.assertEqual(child.attrib['value'], how_much)

        # element should be created:
        expect_javac_settings('512')
        child = element[0][0]

        # a lower value is bumped back up to 512:
        child.attrib['value'] = '256'
        process_compiler_settings(Mock(force=False), self.xml)(self.logger)
        expect_javac_settings('512')

        # a higher value is kept:
        child.attrib['value'] = '768'
        process_compiler_settings(Mock(force=False), self.xml)(self.logger)
        expect_javac_settings('768')

        # force resets to the default 512:
        process_compiler_settings(Mock(force=True), self.xml)(self.logger)
        expect_javac_settings('512')

    def test_install_dev_profiles(self):
        """The three dev maven profiles are installed into workspace.xml,
        and a removed profile is restored on re-run without duplicating
        the others."""

        file = os.sep.join(['.', '.idea', 'workspace.xml'])
        element = XML.Element('project', {'version':'4'})
        self.xml.expect_parse(file, element)

        process_dev_profiles(self.xml)(self.logger)

        list = element[0][0][0]
        self.assertEqual(len(list), 3)
        profiles = ['func-mode-plugins', 'pseudo-loc', 'dev-mode-plugins']
        for elem in list:
            self.assertIn(elem.attrib['value'], profiles)

        list.remove(list[0])
        process_dev_profiles(self.xml)(self.logger)
        self.assertEqual(len(list), 3)

    def test_install_run_configs(self):
        """Template run configurations are merged into the workspace
        RunManager: pre-existing user configs are kept, local attribute
        edits survive a re-run, and force restores the template version."""

        workspace_file = os.sep.join(['.idea', 'workspace.xml'])
        ws_root = XML.Element('project', {'version':'4'})
        ws_component = self.xml.produce(ws_root, ('component',  {'name': 'RunManager'}))
        # the workspace already holds two template-named configs and one
        # user-defined config ('another'):
        for cfg in ['cfg1', 'cfg4', 'another']:
            self.xml.produce(ws_component, ('configuration', {'default': 'false', 'name': cfg}))

        num_cfg = 6
        idea_runners_file = os.sep.join(
            ['jira-ide-support', 'src', 'main', 'resources', 'ideaTemplates', 'runConfigurations.xml'])
        idea_root = XML.Element('project', {'version':'4'})
        idea_component = self.xml.produce(idea_root, ('component',  {'name': 'RunManager'}))
        for cfg in ('cfg' + str(e) for e in range(num_cfg)):
            self.xml.produce(idea_component, ('configuration', {'default': 'false', 'name': cfg}))

        self.xml.expect_parse(workspace_file, ws_root)
        self.xml.expect_parse(idea_runners_file, idea_root)

        args = Mock(force = False)
        process_run_configs(args, self.xml)(self.logger)

        # all template configs plus the user's 'another' remain:
        self.assertEqual(len(ws_component), num_cfg + 1)
        cfg2 = self.xml.produce(ws_component, ('configuration', {'name': 'cfg2'}))
        cfg2.attrib['is-sad'] = 'notsomuch'

        # re-run without force keeps local attribute edits:
        process_run_configs(args, self.xml)(self.logger)
        self.assertEqual(len(ws_component), num_cfg + 1)
        cfg2 = self.xml.produce(ws_component, ('configuration', {'name': 'cfg2'}))
        self.assertEqual(cfg2.attrib['is-sad'], 'notsomuch')
        cfg2.attrib['snowflakes'] = 'omg-so-many!'

        # force wipes local edits back to the template:
        args.force = True
        process_run_configs(args, self.xml)(self.logger)
        self.assertEqual(len(ws_component), num_cfg + 1)
        cfg2 = self.xml.produce(ws_component, ('configuration', {'name': 'cfg2'}))
        self.assertNotIn('is-sad', cfg2.attrib)
        self.assertNotIn('snowflakes', cfg2.attrib)

    def test_install_project_local_settings(self):
        """The template codeStyleSettings.xml is copied only when
        per-project settings are disabled; workspace.xml is then touched
        (NOTE(review): presumably so IDEA picks the change up — confirm)."""

        workspace_codestyle_file = os.sep.join(['.idea', 'codeStyleSettings.xml'])

        # per-project settings already enabled -> nothing to do:
        element = XML.Element('project', {'version':'4'})
        self.xml.produce(element,
                         ('component', {'name': 'ProjectCodeStyleSettingsManager'}),
                         ('option', {'name': 'USE_PER_PROJECT_SETTINGS', 'value': 'true'}))
        self.xml.expect_parse(workspace_codestyle_file,element)

        process_project_local_settings(Mock(force=False), self.fileutils, self.xml)(self.logger)

        self.assertEqual(self.fileutils.callcount_copy_file(), 0)
        self.assertEqual(self.fileutils.callcount_touch(), 0)

        # per-project settings disabled -> template must be installed:
        element = XML.Element('project', {'version':'4'})
        self.xml.produce(element,
            ('component', {'name': 'ProjectCodeStyleSettingsManager'}),
            ('option', {'name': 'USE_PER_PROJECT_SETTINGS', 'value': 'false'}))
        self.xml.expect_parse(workspace_codestyle_file,element)

        process_project_local_settings(Mock(force=False), self.fileutils, self.xml)(self.logger)

        self.assertEqual(self.fileutils.callcount_copy_file(), 1)
        self.assertTrue(self.fileutils.verify_copy_file(
            os.sep.join(['jira-ide-support', 'src', 'main', 'resources', 'ideaTemplates', 'codeStyleSettings.xml']),
            os.sep.join(['.idea', 'codeStyleSettings.xml'])))
        self.assertEqual(self.fileutils.callcount_touch(), 1)
        self.assertTrue(self.fileutils.verify_touch(os.sep.join(['.idea', 'workspace.xml'])))

    def test_installing_jmake_module(self):
        """The jmake module is registered in modules.xml and its iml file
        copied; a re-run does not override the entry; an existing iml
        suppresses creation unless force is given."""

        workspace_modules_file = os.sep.join(['.', '.idea', 'modules.xml'])
        element = XML.Element('project', {'version':'4'})
        self.xml.expect_parse(workspace_modules_file,  element)

        # test that it is created:
        self.fileutils = MockFileUtils()
        process_jmake_module(Mock(force=False), self.fileutils, self.xml)(self.logger)
        self.assertEqual(self.fileutils.callcount_copy_file(), 1)
        module_list = element[0][0]
        self.assertEqual(len(module_list), 1)
        module = module_list[0]
        self.assertEqual(module.tag, 'module')
        self.assertIn('filepath', module.attrib)
        self.assertIn('fileurl', module.attrib)

        # test that it is not overridden:
        self.fileutils = MockFileUtils()
        module.attrib['angry'] = 'nerdz'
        process_jmake_module(Mock(force=False), self.fileutils, self.xml)(self.logger)
        self.assertEqual(self.fileutils.callcount_copy_file(), 1)
        module = module_list[0]
        self.assertEqual(module.tag, 'module')
        self.assertIn('filepath', module.attrib)
        self.assertIn('fileurl', module.attrib)
        self.assertIn('angry', module.attrib)
        self.assertEqual(module.attrib['angry'], 'nerdz')

        # test, that it will not be created when iml file exists
        module_list.remove(module)
        self.fileutils = MockFileUtils()
        self.fileutils.expect_file_exists(os.sep.join(['.', 'jmake_src', 'jmake_src.iml']), toReturn=True)
        process_jmake_module(Mock(force=False), self.fileutils, self.xml)(self.logger)
        self.assertEqual(self.fileutils.callcount_copy_file(), 0)
        self.assertEqual(len(module_list), 0)

        # force should override that
        process_jmake_module(Mock(force=True), self.fileutils, self.xml)(self.logger)
        self.assertEqual(self.fileutils.callcount_copy_file(), 1)
        self.assertEqual(len(module_list), 1)
class TestBundledPluginsUtility(TestCase):
    """Tests for BundledPluginsUtility.find_plugins_to_recompile: decides
    whether the whole bundled-plugins module, a single plugin module, or
    nothing needs rebuilding, based on the recorded plugins list, jar
    presence and file timestamps."""

    @classmethod
    def setUpClass(cls):
        # silence logging for the whole class
        LOG.set_none()

    def setUp(self):
        self.file_utils = MockFileUtils()
        self.finder = BundledPluginsUtility(self.file_utils)

    def test_bundled_plugins_recompilation_when_no_list(self):
        """No recorded plugins list -> rebuild all bundled plugins."""
        plugins = self.finder.find_plugins_to_recompile([])

        self.assertListEqual(plugins, ['jira-components/jira-plugins/jira-bundled-plugins'])

    def test_bundled_plugins_list_is_not_up_to_date(self):
        """List older than the bundled-plugins pom -> rebuild all."""
        self.file_utils.expect_file_exists(BUNDLED_PLUGINS_LIST, toReturn=True)
        self.file_utils.expect_getmtime(BUNDLED_PLUGINS_LIST, toReturn=100)
        self.file_utils.expect_getmtime(BUNDLED_PLUGINS_POM, toReturn=101)

        plugins = self.finder.find_plugins_to_recompile([])
        self.assertListEqual(plugins, ['jira-components/jira-plugins/jira-bundled-plugins'])

    def test_bundled_plugins_recompilation_when_not_all_in_local_repo(self):
        """A listed jar missing from the local repo -> rebuild all."""
        self.prepare_bp_ok()
        self.prepare_plugin_that_should_not_be_recompiled()
        self.file_utils.expect_read_lines(BUNDLED_PLUGINS_LIST, toReturn=['file1', 'file2'])
        self.file_utils.expect_file_exists('file1', toReturn=True)
        plugins = self.finder.find_plugins_to_recompile([])
        self.assertListEqual(plugins, ['jira-components/jira-plugins/jira-bundled-plugins'])

    def test_bundled_plugins_recompilation_profiles_does_not_equal(self):
        """Requested maven profiles differ from the recorded ones ->
        rebuild all."""
        self.prepare_bp_ok()
        self.prepare_plugin_that_should_not_be_recompiled()
        self.file_utils.expect_read_lines(BUNDLED_PLUGINS_LIST, toReturn=['file1', 'file2'])
        self.file_utils.expect_file_exists(JMAKE_PROFILES_PLUGINS_LIST, toReturn=True)
        self.file_utils.expect_read_lines(JMAKE_PROFILES_PLUGINS_LIST, toReturn=['profiles1', 'profile2'])
        self.file_utils.expect_file_exists('file1', toReturn=True)
        self.file_utils.expect_file_exists('file2', toReturn=True)
        plugins = self.finder.find_plugins_to_recompile(['profile1', 'profile3'])
        self.assertListEqual(plugins, ['jira-components/jira-plugins/jira-bundled-plugins'])

    def test_bundled_plugins_recompilation_when_old_profiles_files_does_not_exists(self):
        """No recorded profiles file -> rebuild all."""
        self.prepare_bp_ok()
        self.prepare_plugin_that_should_not_be_recompiled()
        self.file_utils.expect_read_lines(BUNDLED_PLUGINS_LIST, toReturn=['file1', 'file2'])
        self.file_utils.expect_file_exists('file1', toReturn=True)
        self.file_utils.expect_file_exists('file2', toReturn=True)
        plugins = self.finder.find_plugins_to_recompile(['profile1', 'profile3'])
        self.assertListEqual(plugins, ['jira-components/jira-plugins/jira-bundled-plugins'])

    def test_bundled_plugins_are_not_recompiled_when_profiles_equal(self):
        """Same profiles (order-insensitive), all jars present and fresh ->
        nothing to rebuild."""
        self.prepare_bp_ok()

        self.prepare_plugin_that_should_not_be_recompiled()
        self.file_utils.expect_read_lines(BUNDLED_PLUGINS_LIST, toReturn=['file1', 'file2'])
        self.file_utils.expect_file_exists(JMAKE_PROFILES_PLUGINS_LIST, toReturn=True)
        self.file_utils.expect_read_lines(JMAKE_PROFILES_PLUGINS_LIST, toReturn=['profile1', 'profile2'])
        self.file_utils.expect_file_exists('file1', toReturn=True)
        self.file_utils.expect_file_exists('file2', toReturn=True)
        plugins = self.finder.find_plugins_to_recompile(['profile2', 'profile1'])
        self.assertListEqual(plugins, [])

    def test_plugin_should_be_recompile_when_jar_does_not_exist(self):
        """A plugin whose built jar is missing -> rebuild that module."""
        self.prepare_bp_ok()
        self.prepare_plugin_that_should_not_be_recompiled()
        self.file_utils.expect_read_lines(BUNDLED_PLUGINS_LIST,
            toReturn=[JIRA_PLUGINS_DIR_ABS + os.sep + 'module1' + os.sep + 'target' + os.sep + 'test.jar'])
        self.file_utils.expect_file_exists(JMAKE_PROFILES_PLUGINS_LIST, toReturn=True)
        self.file_utils.expect_read_lines(JMAKE_PROFILES_PLUGINS_LIST, toReturn=[])
        plugins = self.finder.find_plugins_to_recompile([])

        self.assertListEqual(plugins, ['jira-components/jira-plugins/module1'])

    def test_plugin_should_be_recompiled_when_src_newer_than_target(self):
        """src tree newer than target tree -> rebuild that module."""
        self.prepare_bp_ok()
        self.prepare_plugin_that_should_not_be_recompiled()
        self.file_utils.expect_file_exists(JMAKE_PROFILES_PLUGINS_LIST, toReturn=True)
        self.file_utils.expect_read_lines(JMAKE_PROFILES_PLUGINS_LIST, toReturn=[])
        module_name = 'module1'
        plugin_jar = JIRA_PLUGINS_DIR_ABS + os.sep + module_name + os.sep + 'target' + os.sep + 'test.jar'
        self.file_utils.expect_read_lines(BUNDLED_PLUGINS_LIST, toReturn=[plugin_jar])
        self.file_utils.expect_file_exists(plugin_jar, toReturn=True)
        self.file_utils.expect_getmtime('root' + os.sep + 'src', toReturn=200)
        self.file_utils.expect_getmtime('root' + os.sep + 'target', toReturn=100)
        self.file_utils.expect_walk(PLUGIN_TARGET_DIR.format(module_name), {'files': ['target']})
        self.file_utils.expect_walk(PLUGIN_SRC_DIR.format(module_name), {'files': ['src']})
        plugins = self.finder.find_plugins_to_recompile([])

        self.assertListEqual(plugins, ['jira-components/jira-plugins/module1'])

    def test_plugin_should_be_recompiled_when_jar_older_than_source(self):
        """Jar older than the src tree -> rebuild that module."""
        self.prepare_bp_ok()
        self.prepare_plugin_that_should_not_be_recompiled()
        self.file_utils.expect_file_exists(JMAKE_PROFILES_PLUGINS_LIST, toReturn=True)
        self.file_utils.expect_read_lines(JMAKE_PROFILES_PLUGINS_LIST, toReturn=[])
        module_name = 'module1'
        plugin_jar = JIRA_PLUGINS_DIR_ABS + os.sep + module_name + os.sep + 'target' + os.sep + 'test.jar'
        self.file_utils.expect_read_lines(BUNDLED_PLUGINS_LIST, toReturn=[plugin_jar])
        self.file_utils.expect_file_exists(plugin_jar, toReturn=True)
        self.file_utils.expect_getmtime('root' + os.sep + 'src', toReturn=200)
        self.file_utils.expect_getmtime('root' + os.sep + 'target', toReturn=300)
        self.file_utils.expect_getmtime(plugin_jar, toReturn=100)
        self.file_utils.expect_walk(PLUGIN_TARGET_DIR.format(module_name), {'files': ['target']})
        self.file_utils.expect_walk(PLUGIN_SRC_DIR.format(module_name), {'files': ['src']})
        plugins = self.finder.find_plugins_to_recompile([])

        self.assertListEqual(plugins, ['jira-components/jira-plugins/module1'])

    def test_plugin_should_be_recompiled_when_pom_newer_than_target(self):
        """Module pom newer than the target tree -> rebuild that module."""
        module_name = 'module1'
        self.prepare_bp_ok()
        self.prepare_plugin_that_should_not_be_recompiled()
        self.file_utils.expect_file_exists(JMAKE_PROFILES_PLUGINS_LIST, toReturn=True)
        self.file_utils.expect_read_lines(JMAKE_PROFILES_PLUGINS_LIST, toReturn=[])
        plugin_jar = JIRA_PLUGINS_DIR_ABS + os.sep + module_name + os.sep + 'target' + os.sep + 'test.jar'
        self.file_utils.expect_read_lines(BUNDLED_PLUGINS_LIST, toReturn=[plugin_jar])
        self.file_utils.expect_file_exists(plugin_jar, toReturn=True)
        self.file_utils.expect_getmtime('root' + os.sep + 'src', toReturn=100)
        self.file_utils.expect_getmtime('root' + os.sep + 'target', toReturn=200)
        self.file_utils.expect_getmtime(PLUGIN_POM_DIR.format(module_name), toReturn=300)
        self.file_utils.expect_walk(PLUGIN_TARGET_DIR.format(module_name), {'files': ['target']})
        self.file_utils.expect_walk(PLUGIN_SRC_DIR.format(module_name), {'files': ['src']})

        plugins = self.finder.find_plugins_to_recompile([])

        self.assertListEqual(plugins, ['jira-components/jira-plugins/module1'])

    def test_remember_plugin_profiles_writes_file(self):
        """remember_plugins_profiles persists the profiles list to disk."""
        profiles = ['p1', 'p2', 'p3']

        self.finder.remember_plugins_profiles(profiles)
        self.assertTrue(self.file_utils.verify_write_lines(JMAKE_PROFILES_PLUGINS_LIST, profiles))

    def prepare_bp_ok(self):
        """Make the bundled plugins list present and newer than its pom."""
        self.file_utils.expect_file_exists(BUNDLED_PLUGINS_LIST, toReturn=True)
        self.file_utils.expect_getmtime(BUNDLED_PLUGINS_LIST, toReturn=101)
        self.file_utils.expect_getmtime(BUNDLED_PLUGINS_POM, toReturn=100)

    def prepare_plugin_that_should_not_be_recompiled(self):
        """
        This method adds some noise to files structure to make sure that 'good' plugins are not recompiled
        (its jar exists and its target tree is newer than src and pom).
        """
        module_name = 'goodModule'
        plugin_jar = JIRA_PLUGINS_DIR_ABS + os.sep + module_name + os.sep + 'target' + os.sep + 'test.jar'
        self.file_utils.expect_read_lines(BUNDLED_PLUGINS_LIST, toReturn=[plugin_jar])
        self.file_utils.expect_file_exists(plugin_jar, toReturn=True)
        self.file_utils.expect_getmtime('root' + os.sep + 'srcGood', toReturn=100)
        self.file_utils.expect_getmtime('root' + os.sep + 'targetGood', toReturn=200)
        self.file_utils.expect_getmtime(PLUGIN_POM_DIR.format(module_name), toReturn=150)
        self.file_utils.expect_walk(PLUGIN_TARGET_DIR.format(module_name), {'files': ['targetGood']})
        self.file_utils.expect_walk(PLUGIN_SRC_DIR.format(module_name), {'files': ['srcGood']})

    def test_bundled_plugins_recompilation_added_new_profile(self):
        """An extra requested profile not in the recorded set -> rebuild
        all bundled plugins."""
        self.prepare_bp_ok()
        self.prepare_plugin_that_should_not_be_recompiled()
        self.file_utils.expect_read_lines(BUNDLED_PLUGINS_LIST, toReturn=['file1', 'file2'])
        self.file_utils.expect_file_exists(JMAKE_PROFILES_PLUGINS_LIST, toReturn=True)
        self.file_utils.expect_read_lines(JMAKE_PROFILES_PLUGINS_LIST, toReturn=['profile1', 'profile2'])
        self.file_utils.expect_file_exists('file1', toReturn=True)
        self.file_utils.expect_file_exists('file2', toReturn=True)
        plugins = self.finder.find_plugins_to_recompile(['profile1', 'profile2', 'profile3'])
        self.assertListEqual(plugins, ['jira-components/jira-plugins/jira-bundled-plugins'])
 def setUp(self):
     """Create the plugins utility under test over a mock filesystem."""
     self.file_utils = MockFileUtils()
     self.finder = BundledPluginsUtility(self.file_utils)
    def test_generate_report(self):
        """generate_report must merge the metrics recorded in git notes on
        historic commits with the current metrics into a date-ordered
        points series, serialize it, and write it into the report's
        data.js file."""
        # two historic commits carry json metrics notes; the newest commit
        # ("jira-stats" head) has an empty note and represents "current":
        git = Mock().expect_generate_annotated_commits_with_details(
            "jira-stats",
            toReturn=[
                {
                    'hash':
                    '12121212',
                    'shorthash':
                    '1212',
                    'commiter':
                    'Mark <unit@tester>',
                    'date':
                    1371114910,
                    'note':
                    '{"metrics": {"test.metric": 1}, "build_number": "BN-2"}'
                },
                {
                    'hash':
                    '58585858',
                    'shorthash':
                    '5858',
                    'commiter':
                    'Frank <frank@localhost>',
                    'date':
                    1371111910,
                    'note':
                    '{"metrics": {"test.metric": 3}, "build_number": "BN-1"}'
                },
            ]).ordered_get_commit_details("jira-stats",
                                          toReturn={
                                              'hash': '34343434',
                                              'shorthash': '3434',
                                              'commiter':
                                              'Unit Tester <unit@tester>',
                                              'date': 1371114916,
                                              'note': ''
                                          })

        # expected serializer input: points oldest-first, with the current
        # metrics appended last under build_number 'current':
        json_writter = Mock().ordered_as_str(
            {
                'points': [{
                    'metrics': {
                        'test.metric': 3
                    },
                    'commits': [{
                        'hash': '58585858',
                        'shorthash': '5858',
                        'commiter': 'Frank <frank@localhost>',
                        'date': 1371111910
                    }],
                    'date':
                    1371111910,
                    'build_number':
                    'BN-1'
                }, {
                    'metrics': {
                        'test.metric': 1
                    },
                    'commits': [{
                        'hash': '12121212',
                        'shorthash': '1212',
                        'commiter': 'Mark <unit@tester>',
                        'date': 1371114910
                    }],
                    'date':
                    1371114910,
                    'build_number':
                    'BN-2'
                }, {
                    'metrics': {
                        'test.metric': 2
                    },
                    'commits': [{
                        'date': 1371114916,
                        'commiter': 'Unit Tester <unit@tester>',
                        'shorthash': '3434',
                        'hash': '34343434'
                    }],
                    'date':
                    1371114916,
                    'build_number':
                    'current'
                }]
            },
            toReturn='__JSON__')

        file_utils = MockFileUtils()
        log = Mock()

        current_metrics = DataBean(metrics={"test.metric": 2})

        processor = MetricsProcessor()
        generate_report_closure = processor.generate_report(
            current_metrics, file_utils, git, json_writter)
        generate_report_closure(log)
        # the serialized payload must be embedded in the report's data.js:
        self.assertTrue(
            file_utils.verify_write_lines(
                os.sep.join(['target', 'eh-metrics-report', 'js', 'data.js']),
                [
                    '(function() { var data = __JSON__; executeReport(data); })();'
                ]))
 def setUp(self):
     """Wire LegacyOdSvnSetup with a layout mock exposing the svn link and
     svn dir paths, plus a mock filesystem."""
     self.legacyOdSetup = LegacyOdSvnSetup(Mock(layout=Mock().expect_studio_svn_link(toReturn='/tmp/link')
                                                .expect_studio_svn_dir(toReturn='/tmp/svn')))
     self.fs = MockFileUtils()
# Example #48
# 0
class TestEhMetrics(TestCase):
    def setUp(self):
        """Wire an EhMetrics instance against fully mocked collaborators.

        Each metrics-processor stage flips a ``called_*`` flag when invoked so
        individual tests can confirm which executor job drove which stage.
        """
        super().setUp()

        self.git = Mock()
        self.git.expect_get_remotes(toReturn=['stash!'])

        self.json = Mock()
        self.json.default_as_str(SERIALIZED_METRICS)

        self.fs = MockFileUtils()
        self.executor = []
        self.metrics_processor = Mock()

        # Flags recording which processor stages have been invoked.
        self.called_process_metrics = False
        self.called_generate_report = False
        self.called_check_values = False

        def recorder(flag_name):
            # Factory producing a stage callback that sets the named flag
            # and reports success, replacing three copy-pasted closures.
            def record(_):
                setattr(self, flag_name, True)
                return Callable.success
            return record

        self.metrics_processor.default_process_metrics(recorder('called_process_metrics'))
        self.metrics_processor.default_generate_report(recorder('called_generate_report'))
        self.metrics_processor.default_check_values(recorder('called_check_values'))

        self.ehmetrics = EhMetrics(git=self.git, fs=self.fs,
                                   metrics_processor=self.metrics_processor, json_writer=self.json)

        # Index of the next executor job to run; advanced by __next().
        self.executor_cursor = 0

    def args(self, verbose=False, log=None, branch=None, buildno=None, fast=False, note=False,
             matching=None, non_interactive=False):
        """Build a mock argparse-style namespace for driving EhMetrics.

        BUG FIX: ``log`` previously defaulted to ``Mock()``, which Python
        evaluates once at function-definition time, so every call without an
        explicit ``log`` shared the same Mock instance across tests. A fresh
        Mock is now created per call; passing ``log`` explicitly behaves as
        before.
        """
        return Mock(verbose=verbose, log=Mock() if log is None else log, branch=branch,
                    buildno=buildno, fast=fast, note=note,
                    matching=matching, non_interactive=non_interactive)

    def test_basic_report_generation(self):
        """With default args the full pipeline runs in order, without notes."""
        self.ehmetrics(self.args(), self.executor)

        for step in (self.__next_remotes,
                     self.__next_fetch_notes,
                     self.__next_is_clean_workspace,
                     self.__next_clean_logs,
                     self.__next_record_commit,
                     self.__next_process_metrics,
                     self.__next_generate_report,
                     self.__next_check_values):
            step()
        self.__finished()

    def test_remote_set_when_no_remote_present(self):
        """An empty remote list makes the remotes job add one via set_remote."""
        self.git.expect_get_remotes(toReturn=[])

        self.ehmetrics(self.args(), self.executor)

        self.__next_remotes(withSet=True)
        self.__next_fetch_notes()
        self.__next_is_clean_workspace()
        # NOTE(review): record_commit runs before clean_logs here, the reverse
        # of test_basic_report_generation -- confirm which matches EhMetrics'
        # actual job order.
        for step in (self.__next_record_commit,
                     self.__next_clean_logs,
                     self.__next_process_metrics,
                     self.__next_generate_report,
                     self.__next_check_values):
            step()
        self.__finished()

    def test_failed_branch_check_stops_executor(self):
        """A branch mismatch makes the branch-check job report failure."""
        self.git.expect_current_branch(toReturn='master')
        self.ehmetrics(self.args(branch='snowflake'), self.executor)

        self.__next_remotes()
        self.__next_fetch_notes()
        branch_rc = self.__next_check_branch()
        self.assertNotEqual(Callable.success, branch_rc)

    def test_branch_check(self):
        """A matching branch lets the whole pipeline run to completion."""
        self.git.expect_current_branch(toReturn='snowflake')
        self.ehmetrics(self.args(branch='snowflake'), self.executor)

        self.__next_remotes()
        self.__next_fetch_notes()
        self.assertEqual(Callable.success, self.__next_check_branch())
        for step in (self.__next_is_clean_workspace,
                     self.__next_record_commit,
                     self.__next_clean_logs,
                     self.__next_process_metrics,
                     self.__next_generate_report,
                     self.__next_check_values):
            step()
        self.__finished()

    def test_produce_and_push_notes(self):
        """--note on a clean workspace publishes metrics as git notes."""
        self.git.expect_is_clean_workspace(toReturn=True)
        self.ehmetrics(self.args(note=True), self.executor)

        self.__next_remotes()
        self.__next_fetch_notes()
        self.assertEqual(Callable.success, self.__next_is_clean_workspace())
        for step in (self.__next_record_commit,
                     self.__next_clean_logs,
                     self.__next_process_metrics,
                     self.__next_generate_report,
                     self.__next_check_values,
                     self.__next_set_user,
                     self.__next_put_notes):
            step()
        self.__next_push_notes('jira-stats')
        self.__finished()

    def test_will_not_push_on_workspace_with_changes(self):
        """--note with a dirty workspace stops at the cleanliness check."""
        self.git.expect_is_clean_workspace(toReturn=False)
        self.ehmetrics(self.args(note=True), self.executor)

        self.__next_remotes()
        self.__next_fetch_notes()
        clean_rc = self.__next_is_clean_workspace()
        self.assertNotEqual(Callable.success, clean_rc)

    def test_using_developer_connection(self):
        """set_remote derives the server from the pom's developerConnection,
        stripping the 'scm:git:' prefix before configuring origin."""
        server = 'bitbucket.wonderland.universe.com'

        pom = Mock()
        pom.expect_get_developer_connection(toReturn='scm:git:' + server)

        self.ehmetrics.set_remote(Mock(), pom_parser=pom)
        self.assertTrue(self.git.verify_set_remote('origin', server))

    def __next(self):
        """Run the next queued executor job and return its exit code."""
        produced = len(self.executor)
        self.assertLess(self.executor_cursor, produced,
                        'No more executor jobs! Only %s jobs produced.' % str(produced))
        job = self.executor[self.executor_cursor]
        result = job(Mock())
        self.executor_cursor += 1
        return result

    def __finished(self):
        """Assert every produced executor job has been consumed."""
        consumed = self.executor_cursor
        self.assertEqual(consumed, len(self.executor),
                         'there are more executor jobs than %s!' % str(consumed))

    def __next_remotes(self, withSet=False):
        """Run the remotes job; with ``withSet`` expect one set_remote call."""
        self.assertFalse(self.git.verify_get_remotes())
        self.assertEqual(0, self.git.callcount_set_remote())
        self.__next()
        self.assertTrue(self.git.verify_get_remotes())
        expected_set_calls = 1 if withSet else 0
        self.assertEqual(expected_set_calls, self.git.callcount_set_remote())

    def __next_fetch_notes(self, ref: str='*'):
        """Run the job that fetches git notes for *ref* (default: all refs)."""
        calls_before = self.git.callcount_fetch_notes()
        self.assertEqual(0, calls_before)
        self.__next()
        self.assertTrue(self.git.verify_fetch_notes(ref))

    def __next_is_clean_workspace(self):
        """Run the workspace-cleanliness job and return its exit code."""
        self.assertFalse(self.git.verify_is_clean_workspace())
        result = self.__next()
        self.assertTrue(self.git.verify_is_clean_workspace())
        return result

    def __next_record_commit(self):
        """Run the job that writes the current commit hash into the log dir.

        BUG FIX: the final verification used to call
        ``self.git.verify_write_lines(...)`` and drop the result --
        ``write_lines`` is expected on the filesystem mock (``self.fs``), and
        the verification must be asserted or the check never fires.
        """
        commit_file = os.sep.join(
            [self.fs.existing_dir(MetricsCollector.log_directory), '.commit'])
        self.git.expect_current_commit(toReturn='0011')
        self.fs.expect_write_lines(commit_file, '0011', toReturn=None)
        self.__next()
        self.assertTrue(self.fs.verify_write_lines(commit_file, '0011'))

    def __next_clean_logs(self):
        """Run the job that removes the metrics log directory.

        BUG FIX: the ``verify_remove_dir`` result was previously discarded,
        so the removal was never actually asserted; wrap it in assertTrue as
        the sibling helpers do.
        """
        self.fs.expect_dir_exists(MetricsCollector.log_directory, toReturn=True)
        self.fs.expect_remove_dir(MetricsCollector.log_directory, toReturn=None)
        self.__next()
        self.assertTrue(self.fs.verify_remove_dir(MetricsCollector.log_directory))

    def __next_process_metrics(self):
        """Run one job and check it is the one driving process_metrics."""
        self.assertEqual(False, self.called_process_metrics)
        self.__next()
        self.assertEqual(True, self.called_process_metrics)

    def __next_generate_report(self):
        """Run one job and check it is the one driving generate_report."""
        self.assertEqual(False, self.called_generate_report)
        self.__next()
        self.assertEqual(True, self.called_generate_report)

    def __next_check_values(self):
        """Run one job and check it is the one driving check_values."""
        self.assertEqual(False, self.called_check_values)
        self.__next()
        self.assertEqual(True, self.called_check_values)

    def __next_check_branch(self):
        """Run the branch-check job and return its exit code."""
        self.assertFalse(self.git.verify_current_branch())
        result = self.__next()
        self.assertTrue(self.git.verify_current_branch())
        return result

    def __next_set_user(self):
        """Running the next job must configure the git user exactly once."""
        calls_before = self.git.callcount_set_user()
        self.assertEqual(0, calls_before)
        self.__next()
        self.assertEqual(1, self.git.callcount_set_user())

    def __next_put_notes(self):
        """The next job records metrics as a git note on HEAD under jira-stats."""
        calls_before = self.git.callcount_put_notes()
        self.assertEqual(0, calls_before)
        self.__next()
        self.assertTrue(self.git.verify_put_notes(SERIALIZED_METRICS, 'jira-stats', 'HEAD', True))

    def __next_push_notes(self, ref: str):
        """push_notes(ref) must happen only after the next job runs."""
        pushed_before = self.git.verify_push_notes(ref)
        self.assertFalse(pushed_before)
        self.__next()
        self.assertTrue(self.git.verify_push_notes(ref))