def __init__(self, metrics_name: str = 'deprecation.methods', description: str = 'Deprecated methods usage'):
    """Build the mock-backed metrics object with empty maven run history.

    Delegates to the parent with a MockFileUtils so no real file system is
    touched, then initializes the bookkeeping fields the tests inspect.
    """
    # noinspection PyTypeChecker
    super().__init__(metrics_name, description, MockFileUtils())
    # Calls recorded by the mock; tests assert against this list.
    self.maven_runs = []
    # Return code reported back to callers; 0 mimics a successful run.
    self.return_code = 0
def test_file_and_size(self):
    """FileCountAndSize must count files and sum sizes for names matching the
    filter, without ever asking to open any file.

    Fix: the locals were named ``filter`` and ``bytes``, shadowing builtins;
    renamed to ``name_filter`` / ``expected_bytes``.
    """
    name_filter = lambda x: x.startswith('t')
    m = MockFileUtils()
    for word, size in TestMetrics.numbers:
        m.expect_file_size(word, toReturn=size)
    obj = FileCountAndSize('test', name_filter, file_utils=m,
                           metrics_logger=Mock()).configure(Mock(), False)
    obj.pre_files_scan('test-module')
    for word, size in TestMetrics.numbers:
        self.assertFalse(
            obj.wants_file(word),
            'File And Size Count metrics cannot ask to open files.')
    obj.post_files_scan('test-module')
    # Compute the expected aggregates independently of the object under test.
    expected_files = 0
    expected_bytes = 0
    for word, size in TestMetrics.numbers:
        if name_filter(word):
            expected_bytes += size
            expected_files += 1
    self.assertEqual(expected_files, obj.get_values()['test.count'].value)
    self.assertEqual(expected_bytes, obj.get_values()['test.cumulativesize'].value)
def test_check_values_with_unchecked(self):
    """Metrics flagged checked=False must not fail the check even when their
    values moved against the allowed direction.

    Fix: the original assigned ``metrics.metrics`` twice in a row; the first
    assignment (key1, value=12, direction=FALLING) was a dead store,
    immediately overwritten, and has been removed.
    NOTE(review): confirm the first assignment was not meant to populate a
    second key (the stored note contains both key1 and key2).
    """
    processor = MetricsProcessor()
    args = Mock(non_interactive=True)
    git = Mock()
    git.expect_latest_annotated_commit_with_details(
        STATS_REF_NAME,
        toReturn={
            'note': '{"metrics":{"key1": {"value": 10, "description": ""}, '
                    '"key2": {"value": 10, "description": ""}}}',
            'hash': '123456',
            'commiter': 'me'
        })
    git.expect_generate_annotated_commits_with_details(
        STATS_EXCLUSION_REF_NAME, commit_range='123456..HEAD', toReturn=[])
    metrics = DataBean()
    metrics.metrics = {
        'key1': DataBean(value=8, description='', checked=False, direction=RISING)
    }
    self.assertEqual(
        Callable.success,
        processor.check_values(args, metrics, git, MockFileUtils())(Mock()))
def test_check_values_rising(self):
    """A checked metric with direction=RISING whose value fell below the
    previously stored one must make check_values refuse to proceed."""
    processor = MetricsProcessor()
    args = Mock(non_interactive=True)
    git = Mock()
    # Previously recorded value for key1 is 20.
    git.expect_latest_annotated_commit_with_details(
        STATS_REF_NAME,
        toReturn={
            'note': '{"metrics":{"key1": {"value": 20, "description": ""}}}',
            'hash': 'abcdef',
            'commiter': 'me'
        })
    # No exclusions recorded since the stats commit.
    git.expect_generate_annotated_commits_with_details(
        STATS_EXCLUSION_REF_NAME, commit_range='abcdef..HEAD', toReturn=[])
    metrics = DataBean()
    metrics.metrics = {
        'key1': DataBean(value=10, description='', checked=True, direction=RISING)
    }
    outcome = processor.check_values(args, metrics, git, MockFileUtils())(Mock())
    self.assertEqual(Callable.do_not_proceed, outcome)
def test_check_values_respects_other_violations(self):
    """An exclusion covering one metric (key1) must not mask a violation of a
    different metric (key2): the check still refuses to proceed."""
    processor = MetricsProcessor()
    args = Mock(non_interactive=True)
    git = Mock()
    git.expect_latest_annotated_commit_with_details(
        STATS_REF_NAME,
        toReturn={
            'note': '{"metrics":{"key1": {"value": 10, "description": ""},'
                    '"key2": {"value": 10, "description": ""}}}',
            'hash': '123456',
            'commiter': 'me'
        })
    # key1 is explicitly excluded up to 15 ...
    exclusion = {
        'note': '{"committer": "testbot", "reason": "none", "exclusion": {"key1": 15}}'
    }
    git.expect_generate_annotated_commits_with_details(
        STATS_EXCLUSION_REF_NAME, commit_range='123456..HEAD',
        toReturn=[exclusion])
    metrics = DataBean()
    # ... but key2 rose from 10 to 12 against a FALLING direction.
    metrics.metrics = {
        'key1': DataBean(value=8, description='', checked=True, direction=FALLING),
        'key2': DataBean(value=12, description='', checked=True, direction=FALLING)
    }
    outcome = processor.check_values(args, metrics, git, MockFileUtils())(Mock())
    self.assertEqual(Callable.do_not_proceed, outcome)
def test_check_values_generates_tests_report_when_no_previous_metrics(self):
    """With no stored metrics, every checked metric is reported as a success
    in the generated test-suite report, and the check proceeds."""
    suite_logger = Mock()
    logger_factory = Mock().ordered_new_logger("ehMetrics", toReturn=suite_logger)
    # Both metrics are expected to be logged as successes, then saved.
    suite_logger.ordered_success('key2', 0, 'EHMetrics', toReturn=None)
    suite_logger.ordered_success('key3', 0, 'EHMetrics', toReturn=None)
    suite_logger.ordered_save(toReturn=None)

    processor = MetricsProcessor(logger_factory)
    metrics = DataBean(
        metrics={
            'key2': DataBean(value=10, description='', checked=True, direction=FALLING),
            'key3': DataBean(value=10, description='', checked=True, direction=FALLING)
        })
    args = Mock(non_interactive=True)

    outcome = processor.check_values(args, metrics, Mock(), MockFileUtils())(Mock())
    self.assertEqual(Callable.success, outcome)
    suite_logger.verify_all_ordered()
def test_process_local_test_settings(self):
    """process_local_test_settings must filter the localtest template into all
    three target files, substituting port, context and xml location."""
    fileutils = MockFileUtils()
    xml_location_key = '${test.xml.location}'

    file1 = os.sep.join(['.', 'jira-func-tests', 'src', 'main', 'resources',
                         'localtest.properties'])
    file2 = os.sep.join(['.', 'jira-webdriver-tests', 'src', 'main', 'resources',
                         'localtest.properties'])
    file3 = os.sep.join(['.', 'jira-distribution', 'jira-integration-tests',
                         'src', 'main', 'resources', 'localtest.properties'])
    template = os.sep.join(['jira-func-tests', 'src', 'main', 'resources',
                            'localtest.template'])

    # file2 and file3 already exist; file1 does not.
    fileutils.expect_file_exists(file2, toReturn=True)
    fileutils.expect_file_exists(file3, toReturn=True)

    args = MocArgs()
    args.port = 98765
    args.jira_context = 'substituteThis'
    process_local_test_settings(args, fileutils)(Mock())

    # overwrite all each time:
    self.assertEqual(3, len(fileutils.filtered_files))
    for target in [file1, file2, file3]:
        replacements = fileutils.verify_filter_file(template, target)
        self.assertIsNotNone(replacements)
        self.assertIn('${jira.port}', replacements)
        self.assertEqual(str(args.port), replacements['${jira.port}'])
        self.assertIn("${jira.context}", replacements)
        self.assertEqual(args.jira_context, replacements['${jira.context}'])
        self.assertIn(xml_location_key, replacements)

    # Each target points at the xml location appropriate for its module.
    self.assertIn('jira-func-tests',
                  fileutils.verify_filter_file(template, file1)[xml_location_key])
    self.assertIn('jira-webdriver-tests',
                  fileutils.verify_filter_file(template, file2)[xml_location_key])
    self.assertIn('jira-func-tests',
                  fileutils.verify_filter_file(template, file3)[xml_location_key])
def test_simple_startup_without_options(self):
    """Default startup: tomcat runs with the standard JVM options and the
    expected CATALINA_* environment, and no debug/jrebel extras."""
    # having
    tomcat_starter = MockTomcatStarter(self.args, False, MockFileUtils())
    # when
    tomcat_starter(Mock())
    # then
    # noinspection PyUnresolvedReferences
    self.assertEqual(tomcat_starter._SystemCallable__command,
                     self.layout.tomcat_executable() + ' run')
    self.assertTrue(tomcat_starter.executed, 'Process should have been executed')

    options = self.__get_opts_dict(tomcat_starter)
    self.__assertParameter(options, '-Djira.home', self.layout.jira_home())
    self.__assertParameter(options, '-Djira.plugins.bundled.disable', 'false')
    self.__assertParameter(
        options, '-Djira.dev.bundledplugins.url',
        'file://' + BundledPluginsUtility.BUNDLED_PLUGINS_LIST)
    self.__assertParameter(options, '-Xms128m')
    self.__assertParameter(options, '-Xmx1024m')
    self.__assertParameter(options, '-XX:MaxPermSize', '384m')
    # None of the optional dev-mode flags may appear by default.
    self.assertNotIn('-Djira.rebel.root', options)
    self.assertNotIn('-javaagent:', options)
    self.assertNotIn('-agentlib:jdwp:', options)

    # now check tomcat environment variables
    self.__assertParameter(tomcat_starter.env, 'CATALINA_HOME',
                           self.layout.tomcat_dir(False))
    self.__assertParameter(tomcat_starter.env, 'CATALINA_TMPDIR',
                           self.layout.tomcat_temp_dir())
    self.__assertParameter(tomcat_starter.env, 'CATALINA_BASE',
                           self.layout.tomcat_work_dir())
def test_installing_jmake_module(self):
    """process_jmake_module must create the module entry once, leave an
    existing entry untouched, skip work when the iml exists, and recreate
    it when forced."""
    workspace_modules_file = os.sep.join(['.', '.idea', 'modules.xml'])
    element = XML.Element('project', {'version': '4'})
    self.xml.expect_parse(workspace_modules_file, element)

    # test that it is created:
    self.fileutils = MockFileUtils()
    process_jmake_module(Mock(force=False), self.fileutils, self.xml)(self.logger)
    self.assertEqual(self.fileutils.callcount_copy_file(), 1)
    modules = element[0][0]
    self.assertEqual(len(modules), 1)
    entry = modules[0]
    self.assertEqual(entry.tag, 'module')
    self.assertIn('filepath', entry.attrib)
    self.assertIn('fileurl', entry.attrib)

    # test that it is not overridden:
    self.fileutils = MockFileUtils()
    entry.attrib['angry'] = 'nerdz'
    process_jmake_module(Mock(force=False), self.fileutils, self.xml)(self.logger)
    self.assertEqual(self.fileutils.callcount_copy_file(), 1)
    entry = modules[0]
    self.assertEqual(entry.tag, 'module')
    self.assertIn('filepath', entry.attrib)
    self.assertIn('fileurl', entry.attrib)
    # The marker attribute survives, proving the entry was not rewritten.
    self.assertIn('angry', entry.attrib)
    self.assertEqual(entry.attrib['angry'], 'nerdz')

    # test, that it will not be created when iml file exists
    modules.remove(entry)
    self.fileutils = MockFileUtils()
    self.fileutils.expect_file_exists(
        os.sep.join(['.', 'jmake_src', 'jmake_src.iml']), toReturn=True)
    process_jmake_module(Mock(force=False), self.fileutils, self.xml)(self.logger)
    self.assertEqual(self.fileutils.callcount_copy_file(), 0)
    self.assertEqual(len(modules), 0)

    # force should override that
    process_jmake_module(Mock(force=True), self.fileutils, self.xml)(self.logger)
    self.assertEqual(self.fileutils.callcount_copy_file(), 1)
    self.assertEqual(len(modules), 1)
def test_horde_is_not_run_when_one_is_already_running(self):
    """If the status checker reports a running instance, the runner must not
    execute its parent system call."""
    runner = MockHordeRunner(self.args, MockFileUtils(), Mock(),
                             self.horde_status_checker.running())
    runner(Mock())
    self.assertFalse(
        runner.executed,
        "Parent class should not be executed when another instance is already running"
    )
def test_bundled_plugins_removes_jira_dev_bundledplugins_url(self):
    """With bundled_plugins enabled the dev bundled-plugins URL option must
    be absent from the JVM options."""
    self.args.bundled_plugins = True
    # having
    tomcat_starter = MockTomcatStarter(self.args, False, MockFileUtils())
    # when
    tomcat_starter(Mock())
    # then
    options = self.__get_opts_dict(tomcat_starter)
    self.assertNotIn('-Djira.dev.bundledplugins.url', options)
def test_setup_home_dir_when_already_exists(self):
    """When the jira home already exists, setup succeeds without further
    file-system work (verified by the ordered mock expectations)."""
    file_utils = MockFileUtils()
    file_utils.ordered_file_exists('some-home', toReturn=True)
    args = MocArgs(setup_home=True)
    args.layout.expect_jira_home(toReturn='some-home')

    setup_jira_home = SetupJiraHomeHsqldb(args, file_utils)
    result = setup_jira_home(Mock())

    self.assertEqual(result, Callable.success, 'Invalid return code')
    file_utils.verify_all_ordered()
def setUp(self):
    """Wire a ManifestoUtils against mocked url/file utilities; the manifesto
    endpoint answers with a fixed hash."""
    self.fs = MockFileUtils()
    self.url = Mock()
    self.manifesto = ManifestoUtils(url_utils=self.url, fs=self.fs)
    self.url.expect_read(
        'https://manifesto.uc-inf.net/api/env/jirastudio-dev', None, None,
        toReturn='{ "hash": "123456" }')
def test_downloading_skipped_when_tomcat_in_place(self):
    """No download happens when the tomcat directory is already present."""
    # having
    mock_file_utils = MockFileUtils()
    downloader = MockTomcatDownloader(self.args, file_utils=mock_file_utils)
    mock_file_utils.expect_file_exists(self.layout.tomcat_dir(False), toReturn=True)
    # when
    downloader(Mock())
    # then
    self.assertFalse(downloader.executed, 'Downloading task was run')
def test_clean_removes_old_home(self):
    """With mvn_clean set, running horde must remove the old horde home dir."""
    self.args.mvn_clean = True
    file_utils = MockFileUtils()
    # Fake the maven/java version probe output.
    process_utils = Mock().default_check_output(
        'Maven:3.3.2\nJava home:jdk_home\n:Java version 1.2.3_123'.encode())
    runner = MockHordeRunner(self.args, file_utils, process_utils,
                             self.horde_status_checker.not_running())
    runner(Mock())
    self.assertTrue(
        file_utils.verify_remove_dir(self.horde_layout.horde_home_dir(False)))
def test_debug_parameter_should_add_agentlib_jdwp_param(self):
    """Debug mode must add the jdwp agent bound to the configured port and
    must not add a javaagent."""
    # having
    tomcat_starter = MockTomcatStarter(self.args, True, MockFileUtils())
    self.args.debug_port = 8546
    # when
    tomcat_starter(Mock())
    # then
    options = self.__get_opts_dict(tomcat_starter)
    self.__assertParameter(
        options, '-agentlib:jdwp',
        'transport=dt_socket,server=y,suspend=n,address=8546')
    self.assertNotIn('-javaagent:', options)
def test_validate_workspace_params_happy_path(self):
    """Validation succeeds when cwd is jira-project inside a workspace that
    has a pom.xml at its root."""
    fileutils = MockFileUtils()
    workspace_dir = '/home/abracadabra/IdeaProjects/rest-workspace'
    jira_project_dir = 'jira-project'
    pom_xml = 'pom.xml'

    fileutils.default_getcwd(os.sep.join([workspace_dir, jira_project_dir]))
    fileutils.expect_file_exists(os.sep.join([workspace_dir, pom_xml]), toReturn=True)
    fileutils.expect_get_parent_dir_path(toReturn=workspace_dir)

    status = WorkspaceValidator(fileutils)(Mock())
    self.assertEqual(status, Callable.success)
def test_check_values_generates_tests_report_with_previous_metrics(self):
    """Against stored metrics, a worsened metric (key2: 9 -> 10, FALLING) is
    reported as a failure, the untouched one as success, and the check
    refuses to proceed."""
    suite_logger = Mock()
    logger_factory = Mock().ordered_new_logger("ehMetrics", toReturn=suite_logger)
    suite_logger.ordered_failed(
        'key2', 0, 'EHMetrics', 'metric key2 () increased from 9 to 10.',
        toReturn=None)
    suite_logger.ordered_success('key3', 0, 'EHMetrics', toReturn=None)
    suite_logger.ordered_save(toReturn=None)

    processor = MetricsProcessor(logger_factory)
    metrics = DataBean(
        metrics={
            'key2': DataBean(value=10, description='', checked=True, direction=FALLING),
            'key3': DataBean(value=10, description='', checked=True, direction=FALLING)
        })

    git = Mock()
    git.ordered_latest_annotated_commit_with_details(
        STATS_REF_NAME,
        toReturn={
            'note': '{"metrics":{"key1": {"value": 10, "description": ""}, "key2": {"value": 9, "description": ""}}}',
            'hash': '123456',
            'commiter': 'me'
        })
    git.ordered_generate_annotated_commits_with_details(
        STATS_EXCLUSION_REF_NAME, commit_range='123456..HEAD', toReturn=[])

    args = Mock(non_interactive=True)
    outcome = processor.check_values(args, metrics, git, MockFileUtils())(Mock())
    self.assertEqual(Callable.do_not_proceed, outcome)
    git.verify_all_ordered()
    suite_logger.verify_all_ordered()
def test_execution_email_enabled(self):
    """Enabling mail must pass the atlassian mail send/fetch flags as 'false'
    (i.e. not disabled)."""
    self.args.enable_mail = True
    # having
    tomcat_starter = MockTomcatStarter(self.args, False, MockFileUtils())
    # when
    tomcat_starter(Mock())
    # then
    self.assertTrue(tomcat_starter.executed, 'Process should have been executed')
    options = self.__get_opts_dict(tomcat_starter)
    self.__assertParameter(options, '-Djira.home', self.layout.jira_home())
    self.__assertParameter(options, '-Datlassian.mail.senddisabled', 'false')
    self.__assertParameter(options, '-Datlassian.mail.fetchdisabled', 'false')
def test_horde_skeleton_is_not_copied_when_directory_exists(self):
    """Run horde without mvn_clean and verify copy/remove behavior on the
    horde home directory.

    NOTE(review): the test name says the skeleton is NOT copied, yet the
    assertions verify copy_tree WAS invoked and remove_dir was not —
    confirm the intended MockFileUtils.verify_* semantics / test name.
    """
    file_utils = MockFileUtils()
    process_utils = Mock().default_check_output(
        'Maven:3.3.2\nJava home:jdk_home\n:Java version 1.2.3_123'.encode())
    runner = MockHordeRunner(self.args, file_utils, process_utils,
                             self.horde_status_checker.not_running())
    runner(Mock())
    self.assertTrue(
        file_utils.verify_copy_tree(self.horde_layout.horde_skeleton_dir(),
                                    self.horde_layout.horde_home_dir(False)))
    self.assertFalse(
        file_utils.verify_remove_dir(self.horde_layout.horde_home_dir(False)))
def test_validate_workspace_params_fails_without_workspace(self):
    """Validation fails (and logs an error) when the workspace root has no
    pom.xml."""
    fileutils = MockFileUtils()
    workspace_dir = '/home/abracadabra/IdeaProjects/rest-workspace'
    jira_project_dir = 'jira-project'
    pom_xml = 'pom.xml'

    fileutils.expect_getcwd(toReturn=os.sep.join([workspace_dir, jira_project_dir]))
    fileutils.expect_file_exists(os.sep.join([workspace_dir, pom_xml]), toReturn=False)
    fileutils.expect_get_parent_dir_path(toReturn=workspace_dir)

    logger = Mock()
    status = WorkspaceValidator(fileutils)(logger)
    self.assertEqual(logger.callcount_error(), 1)
    self.assertEqual(status, Callable.failure)
def test_save(self):
    """save() must write an empty surefire-style suite XML to the expected
    target file."""
    fs = MockFileUtils()
    expected_content = [
        '<?xml version="1.0" encoding="UTF-8"?>',
        '<testsuite failures="0" time="0" errors="0" skipped="0" tests="0" name="suiteName">',
        '<properties/>',
        '</testsuite>'
    ]
    expected_file_name = os.sep.join(
        [SurefireSuiteLogger.target_dir, "TEST-suiteName.xml"])
    fs.expect_write_lines(expected_file_name, expected_content, toReturn=None)

    SurefireSuiteLogger('suiteName', fs).save()

    self.assertTrue(fs.verify_write_lines(expected_file_name, expected_content))
def test_maven_download_executed_when_tomcat_does_not_exist(self):
    """Missing tomcat dir triggers the maven download with the expected
    project/profile/phase/property configuration."""
    # having
    mock_file_utils = MockFileUtils()
    downloader = MockTomcatDownloader(self.args, file_utils=mock_file_utils)
    # when
    downloader(Mock())
    # then
    self.assertTrue(downloader.executed, 'Downloading task was expected to execute')
    self.assertListEqual(downloader.projects, ['jira-ide-support'])
    self.assertListEqual(downloader.profiles, ['download-tomcat', 'ide-setup'])
    self.assertListEqual(downloader.phases, ['initialize'])
    self.assertDictEqual(downloader.properties,
                         {'tomcat.dir': self.layout.tomcat_download_dir()})
def test_execution_is_suppressed_when_jrebel_not_discovered(self):
    """When --jrebel is requested but no plugin dir yields a jrebel.jar, the
    starter must bail out with do_not_proceed and log a jrebel error.

    Fix: the assertion message wrongly read 'Expected successful return
    code' while asserting Callable.do_not_proceed.
    """
    # having
    self.args.jrebel = True
    tomcat_starter = MockTomcatStarter(
        self.args, False,
        MockFileUtils().expect_possible_idea_plugin_dirs(toReturn=[]))
    log = Mock()
    # when
    return_code = tomcat_starter(log)
    # then
    self.assertEqual(return_code, Callable.do_not_proceed,
                     'Expected do-not-proceed return code')
    self.assertTrue(log.callcount_error() > 0)
    # verify, that jrebel related error message was logged:
    self.assertTrue(
        reduce(lambda r, e: r or 'jrebel' in e, log.error.made_calls, False))
def test_setup_home_dir_has_to_unzip_and_filter_file(self):
    """A missing jira home must be set up: both the db config and the db
    script templates are filtered into place and then removed."""
    file_utils = MockFileUtils()
    jira_home = 'some-home'
    file_utils.expect_file_exists(jira_home, toReturn=False)
    args = Mock(layout=Mock().expect_jira_home(toReturn=jira_home))

    setup_jira_home = SetupJiraHomeHsqldb(args, file_utils)
    result = setup_jira_home(Mock())
    self.assertEqual(result, Callable.success, 'Invalid return code')

    self.verify_template_is_filtered_and_removed(
        file_utils,
        os.sep.join([jira_home, SetupJiraHomeHsqldb.DB_CONFIG]),
        os.sep.join([jira_home, SetupJiraHomeHsqldb.DB_CONFIG_TEMPLATE]))
    self.verify_template_is_filtered_and_removed(
        file_utils,
        os.sep.join([jira_home, SetupJiraHomeHsqldb.DB_DIRECTORY,
                     SetupJiraHomeHsqldb.DB_SCRIPT]),
        os.sep.join([jira_home, SetupJiraHomeHsqldb.DB_DIRECTORY,
                     SetupJiraHomeHsqldb.DB_SCRIPT_TEMPLATE]))
def _test_diff_metrics_impl(self, metrics_to_compare, files_now, files_before,
                            expected_result, file_contents):
    """Shared driver: mock 'now'/'before' directory listings and file reads,
    run MetricsDiff.diff_metrics, and compare against expected_result.

    Fixes: the loop variable rebound (shadowed) the ``file_contents``
    parameter while iterating it; and the hit-log name lists were lazy
    ``map()`` iterators — materialized as lists so the mock holds concrete,
    re-iterable values.
    """
    file_utils = MockFileUtils()
    files_now = [MetricsDiff.HIT_LOG_FILE_NAME_PATTERN % f for f in files_now]
    files_before = [MetricsDiff.HIT_LOG_FILE_NAME_PATTERN % f for f in files_before]
    file_utils.expect_listdir('now', toReturn=files_now)
    file_utils.expect_listdir('before', toReturn=files_before)
    for file_name, contents in file_contents.items():
        for when in ['before', 'now']:
            if contents[when] is not None:
                file_utils.expect_read_lines(
                    os.sep.join([when,
                                 MetricsDiff.HIT_LOG_FILE_NAME_PATTERN % file_name]),
                    toReturn=contents[when])
    log = Mock()
    # noinspection PyTypeChecker
    md = MetricsDiff(file_utils)
    diffs = list(md.diff_metrics(log, 'now', 'before', metrics_to_compare))
    self.assertEqual(expected_result, diffs)
def test_jrebel_is_discovered_and_java_opts_are_set(self):
    """When a jrebel.jar is found under one of the idea plugin dirs, the
    javaagent and rebel root options are set and startup succeeds."""
    # having
    self.args.jrebel = True
    file_utils = MockFileUtils()
    file_utils.expect_possible_idea_plugin_dirs(
        toReturn=['idea12', 'idea13', 'idea129'])
    # Only the idea13 location actually contains the jar.
    jrebel_path = os.path.join('idea13', 'config', 'plugins', 'jr-ide-idea',
                               'lib', 'jrebel', 'jrebel.jar')
    file_utils.expect_file_exists(jrebel_path, toReturn=True)
    tomcat_starter = MockTomcatStarter(self.args, False, file_utils)
    # when
    return_code = tomcat_starter(Mock())
    # then
    self.assertEqual(return_code, Callable.success,
                     'Expected successful return code')
    options = self.__get_opts_dict(tomcat_starter)
    self.__assertParameter(options, '-javaagent:"' + jrebel_path + '"')
    self.__assertParameter(options, '-Djira.rebel.root',
                           '"' + PathUtils.abspath('.') + '"')
    self.assertNotIn('-agentlib:jdwp:', options)
def test_transform_maven_executable_for_workspace(self):
    """The workspace delegator must pick up versions from the workspace pom
    files and expose them as jira.* maven properties."""
    fileutils = MockFileUtils()
    workspace_dir = '/home/abracadabra/IdeaProjects/rest-workspace'
    jira_project_dir = 'jira-project'
    jira_rest_dir = 'jira-rest'
    pom_xml = 'pom.xml'

    fileutils.expect_get_parent_dir_relpath(workspace_dir, toReturn=jira_project_dir)
    fileutils.default_getcwd(os.sep.join([workspace_dir, jira_project_dir]))
    # The workspace contains two module dirs, .idea, a pom and a README.
    fileutils.expect_listdir(
        workspace_dir,
        toReturn=[jira_project_dir, jira_rest_dir, '.idea', pom_xml, 'README.txt'])
    fileutils.expect_dir_exists(os.sep.join([workspace_dir, jira_project_dir]), toReturn=True)
    fileutils.expect_dir_exists(os.sep.join([workspace_dir, jira_rest_dir]), toReturn=True)
    fileutils.expect_dir_exists(os.sep.join([workspace_dir, '.idea']), toReturn=True)
    fileutils.expect_file_exists(
        os.sep.join([workspace_dir, jira_project_dir, pom_xml]), toReturn=True)
    fileutils.expect_file_exists(
        os.sep.join([workspace_dir, jira_rest_dir, pom_xml]), toReturn=True)

    # Each pom path maps to a parser mock reporting artifact id and version.
    parser_mocks = {
        os.sep.join([workspace_dir, jira_project_dir, pom_xml]):
            Mock().expect_get_artifact_id(toReturn=jira_project_dir)
                  .expect_get_version(toReturn='10.0.0-SNAPSHOT'),
        os.sep.join([workspace_dir, jira_rest_dir, pom_xml]):
            Mock().expect_get_artifact_id(toReturn=jira_rest_dir)
                  .expect_get_version(toReturn='9.1.2-SNAPSHOT')
    }

    webapp_project = os.sep.join(['jira-components', 'jira-webapp'])
    bundled_plugins_project = os.sep.join(
        ['jira-components', 'jira-plugins', 'jira-bundled-plugins'])
    maven_mock = Mock(projects=[webapp_project, bundled_plugins_project])

    MavenCallableWorkspaceDelegator.after_init(
        maven_mock, workspace_dir, fileutils,
        pom_parser_class_object=lambda path: parser_mocks[path])

    expected_properties = {
        'jira.version': '10.0.0-SNAPSHOT',
        'jira.project.version': '10.0.0-SNAPSHOT',
        'jira.rest.version': '9.1.2-SNAPSHOT'
    }
    for key, value in expected_properties.items():
        self.assertTrue(maven_mock.verify_property(key, value))
def test_ondemand_option_adds_properties(self):
    """OnDemand layout must add the studio/crowd system properties and drop
    the dev bundled-plugins URL."""
    # having
    self.args.layout.ondemand = True
    self.args.horde_layout = HordeLayout(self.layout.jira_home())
    tomcat_starter = MockTomcatStarter(self.args, True, MockFileUtils())
    # when
    tomcat_starter(Mock())
    # then
    options = self.__get_opts_dict(tomcat_starter)
    self.__assertParameter(options, '-Dstudio.initial.data.xml',
                           self.layout.studio_initial_data())
    self.__assertParameter(options, '-Dstudio.home', self.layout.jira_home())
    self.__assertParameter(
        options,
        '-Datlassian.darkfeature.com.atlassian.jira.config.CoreFeatures.ON_DEMAND',
        'true')
    self.__assertParameter(options, '-Dcrowd.property.application.login.url',
                           self.args.horde_layout.horde_application_login_url())
    self.__assertParameter(options, '-Dcrowd.property.crowd.server.url',
                           self.args.horde_layout.horde_server_url())
    self.__assertParameter(options, '-Dstudio.webdav.directory',
                           self.layout.webdav_dir())
    self.assertNotIn('-Djira.dev.bundledplugins.url', options)
def test_generate_report(self):
    """generate_report must order stats commits by date, append the current
    metrics as a 'current' point, serialize via the json writer and write
    the report data.js wrapper."""
    # Two historical annotated stats commits (unordered by date) ...
    git = Mock().expect_generate_annotated_commits_with_details(
        "jira-stats",
        toReturn=[
            {
                'hash': '12121212',
                'shorthash': '1212',
                'commiter': 'Mark <unit@tester>',
                'date': 1371114910,
                'note': '{"metrics": {"test.metric": 1}, "build_number": "BN-2"}'
            },
            {
                'hash': '58585858',
                'shorthash': '5858',
                'commiter': 'Frank <frank@localhost>',
                'date': 1371111910,
                'note': '{"metrics": {"test.metric": 3}, "build_number": "BN-1"}'
            },
        ]).ordered_get_commit_details(
        # ... plus the current head commit (no note yet).
        "jira-stats",
        toReturn={
            'hash': '34343434',
            'shorthash': '3434',
            'commiter': 'Unit Tester <unit@tester>',
            'date': 1371114916,
            'note': ''
        })

    # The writer must be handed the points sorted by date, with the current
    # metrics appended under build_number 'current'.
    json_writer = Mock().ordered_as_str(
        {
            'points': [{
                'metrics': {'test.metric': 3},
                'commits': [{
                    'hash': '58585858',
                    'shorthash': '5858',
                    'commiter': 'Frank <frank@localhost>',
                    'date': 1371111910
                }],
                'date': 1371111910,
                'build_number': 'BN-1'
            }, {
                'metrics': {'test.metric': 1},
                'commits': [{
                    'hash': '12121212',
                    'shorthash': '1212',
                    'commiter': 'Mark <unit@tester>',
                    'date': 1371114910
                }],
                'date': 1371114910,
                'build_number': 'BN-2'
            }, {
                'metrics': {'test.metric': 2},
                'commits': [{
                    'date': 1371114916,
                    'commiter': 'Unit Tester <unit@tester>',
                    'shorthash': '3434',
                    'hash': '34343434'
                }],
                'date': 1371114916,
                'build_number': 'current'
            }]
        },
        toReturn='__JSON__')

    file_utils = MockFileUtils()
    log = Mock()
    current_metrics = DataBean(metrics={"test.metric": 2})
    processor = MetricsProcessor()

    processor.generate_report(current_metrics, file_utils, git, json_writer)(log)

    self.assertTrue(
        file_utils.verify_write_lines(
            os.sep.join(['target', 'eh-metrics-report', 'js', 'data.js']),
            [
                '(function() { var data = __JSON__; executeReport(data); })();'
            ]))