def test_parse_build_results_patch(self):
    """
    Test parse_build_results with a test log indicating failure due to a
    backport patch no longer applying.
    """
    def mock_util_call(cmd):
        # Stub util.call so no external command actually runs.
        del cmd

    def mock_conf_remove_backport_patch(patch):
        # Pretend the failing backport patch was removed; returns 1, and the
        # assertion below shows must_restart ends up 1 after parsing.
        del patch
        return 1

    conf = config.Config('')
    conf.setup_patterns()
    # Install the fake remover and register the patch named in the log below.
    conf.remove_backport_patch = mock_conf_remove_backport_patch
    conf.patches = ['backport-test.patch']
    reqs = buildreq.Requirements("")
    tcontent = tarball.Content("", "", "", [], conf, "/")
    # Swap out util.call for the duration of the test, restore afterwards.
    call_backup = build.util.call
    build.util.call = mock_util_call
    pkg = build.Build()
    fm = files.FileManager(conf, pkg)
    open_name = 'build.util.open_auto'
    # Minimal log excerpt matching the "Skipping patch" failure pattern.
    content = 'line 1\nPatch #1 (backport-test.patch):\nSkipping patch.'
    m_open = mock_open(read_data=content)
    with patch(open_name, m_open, create=True):
        pkg.parse_build_results('testname', 0, fm, conf, reqs, tcontent)
    build.util.call = call_backup
    self.assertEqual(pkg.must_restart, 1)
def check_complete():
    """Checks whether the 'complete' build references every file.

    This is used by the build script to ensure that every file is
    included in at least one build type.

    Returns:
      True on success, False on failure.
    """
    logging.info('Checking that the build files are complete...')

    complete = build.Build()
    # Normally we don't need to include @core, but because we look at the build
    # object directly, we need to include it here. When using main(), it will
    # call addCore which will ensure core is included.
    if not complete.parse_build(['+@complete', '+@core'], os.getcwd()):
        logging.error('Error parsing complete build')
        return False

    js_pattern = re.compile(r'.*\.js$')
    source_base = shakaBuildHelpers.get_source_base()
    lib_files = shakaBuildHelpers.get_all_files(
        os.path.join(source_base, 'lib'), js_pattern)

    # Any library file the complete build does not reference is an error.
    unreferenced = set(lib_files) - complete.include
    if not unreferenced:
        return True

    logging.error('There are files missing from the complete build:')
    for path in unreferenced:
        # Convert to a path relative to source base.
        logging.error(' ' + os.path.relpath(path, source_base))
    return False
def test_parse_build_results_failed_pats(self):
    """
    Test parse_build_results with a test log indicating failure due to a
    missing package.

    tests/builderrors holds '<log text>|<expected buildreq>' pairs, one per
    line; lines starting with '#' are comments.  Each entry must add the
    expected build requirement and set must_restart.
    """
    conf = config.Config('')
    conf.setup_patterns()
    reqs = buildreq.Requirements("")
    tcontent = tarball.Content("", "", "", [], conf, "/")
    # Patch out the module-level helpers; restored at the end of the test.
    call_backup = build.util.call
    open_auto_backup = build.util.open_auto
    build.util.call = MagicMock(return_value=None)
    pkg = build.Build()
    fm = files.FileManager(conf, pkg)
    with open('tests/builderrors', 'r') as f:
        builderrors = f.readlines()
    for error in builderrors:
        if error.startswith('#'):
            # Comment line in the fixture file.
            continue
        # Fix: the originals were named 'input'/'output', shadowing the
        # 'input' builtin; use descriptive names instead.
        log_text, expected_req = error.strip('\n').split('|')
        reqs.buildreqs = set()
        build.util.open_auto = mock_open(read_data=log_text)
        pkg.parse_build_results('testname', 0, fm, conf, reqs, tcontent)
        self.assertIn(expected_req, reqs.buildreqs)
        self.assertGreater(pkg.must_restart, 0)
    # Restoring functions
    build.util.call = call_backup
    build.util.open_auto = open_auto_backup
def generate(self):
    """Configure the build: set up the environment, run the interpreter,
    generate the backend files, and pickle the build object to the
    scratch directory.

    Raises:
        RuntimeError: if options.backend names an unknown backend.
    """
    env = environment.Environment(self.source_dir, self.build_dir,
                                  self.meson_script_file, options)
    mlog.initialize(env.get_log_dir())
    mlog.log(mlog.bold('The Meson build system'))
    mlog.log('Version:', coredata.version)
    # Fix: this is a method, so read the directories from self (they are
    # passed to Environment above) instead of the external 'app' object.
    mlog.log('Source dir:', mlog.bold(self.source_dir))
    mlog.log('Build dir:', mlog.bold(self.build_dir))
    if env.is_cross_build():
        mlog.log('Build type:', mlog.bold('cross build'))
    else:
        mlog.log('Build type:', mlog.bold('native build'))
    b = build.Build(env)
    intr = interpreter.Interpreter(b)
    intr.run()
    # NOTE(review): 'options' appears to be a module-level name in this
    # version (it is also passed to Environment above) — confirm; the later
    # revision of this method uses self.options.
    if options.backend == 'ninja':
        import ninjabackend
        g = ninjabackend.NinjaBackend(b, intr)
    elif options.backend == 'vs2010':
        import vs2010backend
        g = vs2010backend.Vs2010Backend(b, intr)
    elif options.backend == 'xcode':
        import xcodebackend
        g = xcodebackend.XCodeBackend(b, intr)
    else:
        raise RuntimeError('Unknown backend "%s".' % options.backend)
    g.generate()
    env.generating_finished()
    dumpfile = os.path.join(env.get_scratch_dir(), 'build.dat')
    # Fix: close the dump file deterministically instead of leaking the
    # handle returned by open().
    with open(dumpfile, 'wb') as dumpf:
        pickle.dump(b, dumpf)
def check_tests():
    """Runs an extra compile pass over the test code to check for type
    errors.

    Returns:
      True on success, False on failure.
    """
    # NOTE(review): Python 2 print statement — this version of the script
    # predates the py3 port (compare the logging-based variant elsewhere).
    print 'Checking the tests for type errors...'
    match = re.compile(r'.*\.js$')
    base = shakaBuildHelpers.get_source_base()

    def get(*args):
        # All .js files under base/<args...>.
        return shakaBuildHelpers.get_all_files(os.path.join(base, *args), match)

    files = (get('lib') + get('externs') + get('test') + get('demo') +
             get('third_party', 'closure'))
    test_build = build.Build(set(files))
    # Ignore missing goog.require since we assume the whole library is
    # already included.
    opts = ['--jscomp_off=missingRequire', '--jscomp_off=strictMissingRequire',
            '--checks-only', '-O', 'SIMPLE']
    return test_build.build_raw(opts, is_debug=True)
def test_parse_buildroot_log_fail(self):
    """
    Test parse_buildroot_log with a test log indicating failure due to
    missing dependencies ('foobar' and 'foobarbaz')
    """
    def fake_call(cmd):
        # No external command should actually run during the test.
        del cmd

    saved_call = build.util.call
    build.util.call = fake_call
    log_text = "line1\nDEBUG util.py:399: No matching package to install: 'foobar'\nDEBUG util.py:399: No matching package to install: 'foobarbaz'\nline 4"
    mocked_open = mock_open(read_data=log_text)
    builder = build.Build()
    # Pre-set both restart counters; a failed parse must zero them.
    builder.must_restart = 1
    builder.file_restart = 1
    parse_ok = True
    with patch('build.util.open_auto', mocked_open, create=True):
        parse_ok = builder.parse_buildroot_log('testname', 1)
    build.util.call = saved_call
    self.assertFalse(parse_ok)
    self.assertEqual(builder.must_restart, 0)
    self.assertEqual(builder.file_restart, 0)
def test_copy_logs(mocker, mock_codebuild, mock_cw_logs, mock_bucket):
    """copy_logs must read every CloudWatch page and upload one joined log."""
    pages = [
        {'events': [{'message': 'foo'}, {'message': 'bar'}]},
        {'events': [{'message': 'baz'}, {'message': 'blah'}]},
    ]
    paginator = mock_cw_logs.get_paginator.return_value
    paginator.paginate.return_value = pages

    _mock_build_details('pr/123')
    subject = build.Build(_mock_build_event())
    subject.copy_logs()

    mock_codebuild.batch_get_builds.assert_called_once_with(ids=[BUILD_ID])
    mock_cw_logs.get_paginator.assert_called_once_with('filter_log_events')
    paginator.paginate.assert_called_once_with(
        logGroupName=LOG_GROUP_NAME, logStreamNames=[LOG_STREAM_NAME])
    # All event messages, in page order, concatenated into a single object.
    mock_bucket.put_object.assert_called_once_with(
        Key=LOG_STREAM_NAME + '/build.log',
        Body='foobarbazblah',
        ContentType="text/plain")
def generator(self):
    """Test template.

    NOTE(review): `state`, `name`, `version`, and `url` are free variables —
    presumably bound by surrounding test-generation code that stamps out one
    copy of this function per case; confirm against the generator/caller.
    """
    conf = config.Config()
    conf.parse_config_versions = Mock(return_value={})
    name_arg = ""
    version_arg = ""
    # state selects which arguments are passed explicitly:
    # 1 -> name only, 2 -> version only, 3 -> both.
    if state == 1 or state == 3:
        name_arg = f"state.{name}"
    if state == 2 or state == 3:
        version_arg = f"state.{version}"
    content = tarball.Content(url, name_arg, version_arg, [], conf)
    content.config = conf
    pkg = build.Build('/tmp')
    pkg.download_path = '/download/path/'
    mgr = files.FileManager(conf, pkg)
    content.name_and_version(mgr)
    # Explicit arguments must win over whatever name_and_version detects.
    name_cmp = name
    version_cmp = version
    if state == 1 or state == 3:
        name_cmp = name_arg
    if state == 2 or state == 3:
        version_cmp = version_arg
    self.assertEqual(name_cmp, content.name)
    self.assertEqual(version_cmp, content.version)
    # redo without args and verify giturl is set correctly
    content.name = ""
    content.version = ""
    content.name_and_version(Mock())
    if "github.com" in url:
        self.assertRegex(content.giturl,
                         r"https://github.com/[^/]+/" + content.repo + ".git")
def check_tests():
    """Runs an extra compile pass over the test code to check for type errors.

    Returns:
      True on success, False on failure.
    """
    logging.info('Checking the tests for type errors...')

    js_pattern = re.compile(r'.*\.js$')
    base = shakaBuildHelpers.get_source_base()

    def collect(*parts):
        # All .js files under base/<parts...>.
        return shakaBuildHelpers.get_all_files(os.path.join(base, *parts),
                                               js_pattern)

    sources = set(collect('lib') + collect('externs') + collect('test') +
                  collect('third_party', 'closure'))
    sources.add(os.path.join(base, 'demo', 'common', 'assets.js'))

    test_build = build.Build(sources)
    closure_opts = (build.common_closure_opts + build.common_closure_defines +
                    build.debug_closure_opts + build.debug_closure_defines)
    # Ignore missing goog.require since we assume the whole library is
    # already included.
    closure_opts += ['--jscomp_off=missingRequire',
                     '--jscomp_off=strictMissingRequire',
                     '--checks-only', '-O', 'SIMPLE']
    return test_build.build_raw(closure_opts)
def __init__(self, app):
    """Create a builder window owned by *app*.

    Args:
        app: owning application object; this code uses its .app (tk root),
            .quit callback, and .windows list.
    """
    self.callback = app
    self.window = tkinter.Toplevel(app.app)
    # Closing this window invokes the application's quit handler.
    self.window.protocol("WM_DELETE_WINDOW", app.quit)
    # Champion keys already selected in the app's other windows.
    self.previous_selections = []
    for window in app.windows:
        # TODO items selected
        self.previous_selections.append(window.builder.champion.key)
    self.builder = build.Build()
    # One tkinter.Frame per UI region; they are populated and laid out by the
    # private __make_* / __display_frames helpers called at the end.
    self.message_selection_champion_frame = tkinter.Frame(self.window)
    self.title_frame = tkinter.Frame(self.window)
    self.message_selection_items_frame = tkinter.Frame(self.window)
    self.item_modes_frame = tkinter.Frame(self.window)
    self.champions_frame = tkinter.Frame(self.window)
    self.stats_base_frame = tkinter.Frame(self.window)
    self.splash_art_frame = tkinter.Frame(self.window)
    self.stats_current_frame = tkinter.Frame(self.window)
    self.items_frame = tkinter.Frame(self.window)
    self.reset_frame = tkinter.Frame(self.window)
    self.build_frame = tkinter.Frame(self.window, relief='ridge', bg='yellow')
    self.save_frame = tkinter.Frame(self.window)
    self.gold_frame = tkinter.Frame(self.window)
    self.__make_frame_header()
    self.__make_frame_body()
    self.__make_frame_footer()
    self.__display_frames()
def test_parse_build_results_simple_pats(self):
    """
    Test parse_build_results with a test log indicating failure due to
    a missing httpd-dev package (simple pat error)
    """
    def fake_call(cmd):
        # No external command should actually run during the test.
        del cmd

    cfg = config.Config('')
    cfg.setup_patterns()
    requirements = buildreq.Requirements("")
    content_obj = tarball.Content("", "", "", [], cfg, "/")
    saved_call = build.util.call
    build.util.call = fake_call
    builder = build.Build()
    manager = files.FileManager(cfg, builder)
    # Log excerpt matching the simple pattern for httpd-dev.
    log_text = 'line 1\nchecking for Apache test module support\nexiting'
    with patch('build.util.open_auto', mock_open(read_data=log_text),
               create=True):
        builder.parse_build_results('testname', 0, manager, cfg,
                                    requirements, content_obj)
    build.util.call = saved_call
    self.assertIn('httpd-dev', requirements.buildreqs)
    self.assertEqual(builder.must_restart, 1)
def test_parse_build_results_pkgconfig(self):
    """
    Test parse_build_results with a test log indicating failure due to
    a missing qmake package (pkgconfig error)
    """
    def fake_call(cmd):
        # No external command should actually run during the test.
        del cmd

    cfg = config.Config('')
    cfg.setup_patterns()
    requirements = buildreq.Requirements("")
    content_obj = tarball.Content("", "", "", [], cfg, "/")
    # 32bit builds must also pull in the 32-bit pkgconfig requirement.
    cfg.config_opts['32bit'] = True
    saved_call = build.util.call
    build.util.call = fake_call
    builder = build.Build()
    manager = files.FileManager(cfg, builder)
    log_text = 'line 1\nwhich: no qmake\nexiting'
    with patch('build.util.open_auto', mock_open(read_data=log_text),
               create=True):
        builder.parse_build_results('testname', 0, manager, cfg,
                                    requirements, content_obj)
    build.util.call = saved_call
    self.assertIn('pkgconfig(Qt)', requirements.buildreqs)
    self.assertIn('pkgconfig(32Qt)', requirements.buildreqs)
    self.assertEqual(builder.must_restart, 1)
def check_externs():
    """Runs an extra compile pass over the generated externs to ensure that
    they are usable.

    Returns:
      True on success, False on failure.
    """
    logging.info('Checking the usability of generated externs...')

    # Create a complete "build" object.
    externs_build = build.Build()
    if not externs_build.parse_build(['+@complete'], os.getcwd()):
        return False
    externs_build.add_core()

    # Use it to generate externs for the next check.
    if not externs_build.generate_externs('check'):
        return False

    # Create a custom "build" object, add all manually-written externs, then
    # add the generated externs we just generated.
    base = shakaBuildHelpers.get_source_base()
    handwritten = shakaBuildHelpers.get_all_files(
        os.path.join(base, 'externs'), re.compile(r'.*\.js$'))
    generated = os.path.join(base, 'dist', 'shaka-player.check.externs.js')
    check_build = build.Build()
    check_build.include = set(handwritten) | {generated}

    # Build with the complete set of externs, but without any application code.
    # This will help find issues in the generated externs, independent of the
    # app.  Since we have no app, don't use the defines.  Unused defines cause
    # a compilation error.
    closure_opts = build.common_closure_opts + build.debug_closure_opts + [
        '--checks-only', '-O', 'SIMPLE'
    ]
    ok = check_build.build_raw(closure_opts)

    # Clean up the temporary externs we just generated.
    os.unlink(generated)

    # Return the success/failure of the build above.
    return ok
def test_simple_pattern_no_match(self):
    """
    Test simple_pattern with no match, nothing should be modified
    """
    requirements = buildreq.Requirements("")
    builder = build.Build()
    builder.simple_pattern('line to test for somepkg.xyz',
                           r'testpkg.xyz', 'testpkg', requirements)
    # Neither the requirement set nor the restart counter may change.
    self.assertEqual(set(), requirements.buildreqs)
    self.assertEqual(0, builder.must_restart)
def complete_build_files():
    """Returns a complete set of build files."""
    build_obj = build.Build()
    # Normally we don't need to include @core, but because we look at the build
    # object directly, we need to include it here. When using main(), it will
    # call addCore which will ensure core is included.
    if build_obj.parse_build(['+@complete', '+@core'], os.getcwd()):
        return build_obj.include
    logging.error('Error parsing complete build')
    return False
def test_simple_pattern_pkgconfig(self):
    """
    Test simple_pattern_pkgconfig with match
    """
    requirements = buildreq.Requirements("")
    builder = build.Build()
    builder.simple_pattern_pkgconfig('line to test for testpkg.xyz',
                                     r'testpkg.xyz', 'testpkg', False,
                                     requirements)
    # A match adds a pkgconfig() requirement and requests a restart.
    self.assertIn('pkgconfig(testpkg)', requirements.buildreqs)
    self.assertEqual(1, builder.must_restart)
def test_simple_pattern_pkgconfig_32bit(self):
    """
    Test simple_pattern_pkgconfig with match and 32bit option set
    """
    requirements = buildreq.Requirements("")
    builder = build.Build()
    builder.simple_pattern_pkgconfig('line to test for testpkg.zyx',
                                     r'testpkg.zyx', 'testpkgz', True,
                                     requirements)
    # With the 32bit flag, both the 32-bit and regular pkgconfig
    # requirements are added.
    self.assertIn('pkgconfig(32testpkgz)', requirements.buildreqs)
    self.assertIn('pkgconfig(testpkgz)', requirements.buildreqs)
    self.assertEqual(1, builder.must_restart)
def generate(self):
    """Configure the build: create the environment, log basic build info,
    run the interpreter against the chosen backend, generate the backend
    files, and pickle the resulting build object into the scratch dir.

    Raises:
        RuntimeError: if self.options.backend names an unknown backend.
    """
    env = environment.Environment(self.source_dir, self.build_dir,
                                  self.meson_script_file, self.options)
    mlog.initialize(env.get_log_dir())
    mlog.debug('Build started at', datetime.datetime.now().isoformat())
    mlog.debug('Python binary:', sys.executable)
    mlog.debug('Python system:', platform.system())
    mlog.log(mlog.bold('The Meson build system'))
    mlog.log('Version:', coredata.version)
    mlog.log('Source dir:', mlog.bold(self.source_dir))
    mlog.log('Build dir:', mlog.bold(self.build_dir))
    if env.is_cross_build():
        mlog.log('Build type:', mlog.bold('cross build'))
    else:
        mlog.log('Build type:', mlog.bold('native build'))
    b = build.Build(env)
    # Select the backend generator; backends are imported lazily so only the
    # selected one is loaded.
    if self.options.backend == 'ninja':
        import ninjabackend
        g = ninjabackend.NinjaBackend(b)
    elif self.options.backend == 'vs2010':
        import vs2010backend
        g = vs2010backend.Vs2010Backend(b)
    elif self.options.backend == 'xcode':
        import xcodebackend
        g = xcodebackend.XCodeBackend(b)
    else:
        raise RuntimeError('Unknown backend "%s".' % self.options.backend)
    intr = interpreter.Interpreter(b, g)
    if env.is_cross_build():
        # Log cpu family/cpu for host, target, and build machines when
        # cross compiling.
        mlog.log('Host machine cpu family:',
                 mlog.bold(intr.builtin['host_machine'].cpu_family_method([], {})))
        mlog.log('Host machine cpu:',
                 mlog.bold(intr.builtin['host_machine'].cpu_method([], {})))
        mlog.log('Target machine cpu family:',
                 mlog.bold(intr.builtin['target_machine'].cpu_family_method([], {})))
        mlog.log('Target machine cpu:',
                 mlog.bold(intr.builtin['target_machine'].cpu_method([], {})))
        mlog.log('Build machine cpu family:',
                 mlog.bold(intr.builtin['build_machine'].cpu_family_method([], {})))
        mlog.log('Build machine cpu:',
                 mlog.bold(intr.builtin['build_machine'].cpu_method([], {})))
    intr.run()
    g.generate(intr)
    env.generating_finished()
    dumpfile = os.path.join(env.get_scratch_dir(), 'build.dat')
    # Fix: close the dump file deterministically instead of leaking the
    # handle returned by open().
    with open(dumpfile, 'wb') as dumpf:
        pickle.dump(b, dumpf)
def test_failed_pattern_no_match(self):
    """
    Test failed_pattern with no match
    """
    cfg = config.Config('')
    requirements = buildreq.Requirements("")
    builder = build.Build()
    builder.failed_pattern('line to test for failure: somepkg', cfg,
                           requirements, r'(test)', 0)
    # No match: nothing is added and no restart is requested.
    self.assertEqual(set(), requirements.buildreqs)
    self.assertEqual(0, builder.must_restart)
def test_failed_pattern_no_buildtool_match(self):
    """
    Test failed_pattern with buildtool unset and match in failed_commands
    """
    cfg = config.Config('')
    requirements = buildreq.Requirements("")
    cfg.setup_patterns()
    builder = build.Build()
    builder.failed_pattern('line to test for failure: lex', cfg,
                           requirements, r'(lex)', 0)
    # 'lex' maps to the 'flex' package via failed_commands.
    self.assertIn('flex', requirements.buildreqs)
    self.assertEqual(1, builder.must_restart)
def test_failed_pattern_no_buildtool(self):
    """
    Test failed_pattern with buildtool unset and initial match, but no
    match in failed_commands.
    """
    cfg = config.Config('')
    requirements = buildreq.Requirements("")
    builder = build.Build()
    builder.failed_pattern('line to test for failure: testpkg', cfg,
                           requirements, r'(test)', 0)
    # Without a failed_commands entry nothing should be modified.
    self.assertEqual(set(), requirements.buildreqs)
    self.assertEqual(0, builder.must_restart)
def test_simple_pattern(self):
    """
    Test simple_pattern with match.
    The main difference between simple_pattern and
    simple_pattern_pkgconfig is the string that is added to
    buildreq.buildreqs.
    """
    requirements = buildreq.Requirements("")
    builder = build.Build()
    builder.simple_pattern('line to test for testpkg.xyz',
                           r'testpkg.xyz', 'testpkg', requirements)
    # The bare package name (no pkgconfig() wrapper) is added.
    self.assertIn('testpkg', requirements.buildreqs)
    self.assertEqual(1, builder.must_restart)
def test_failed_pattern_pypi(self):
    """
    Test failed_pattern with buildtool set to pypi
    """
    cfg = config.Config('')
    requirements = buildreq.Requirements("")
    builder = build.Build()
    builder.failed_pattern('line to test for failure: testpkg.py',
                           cfg, requirements, r'(testpkg)',
                           0,  # verbose=0
                           buildtool='pypi')
    # pypi packages are wrapped as pypi(<name>).
    self.assertIn('pypi(testpkg)', requirements.buildreqs)
    self.assertEqual(1, builder.must_restart)
def _buildServer(self): sys.stdout.write("%suilding server for build '%s'... " % ("B" if not self._rebuild else "Reb", self._buildSettings.getName())) sys.stdout.flush() if self._build == None: self._build = build.Build(settings=self._buildSettings, ssh=self._ssh, remotePath=self._remotePath) if self._rebuild: self._build.makeClean() self._build.makeAll() elif self._rebuild: self._build.makeClean() self._build.makeAll() print "done"
def test_failed_pattern_maven(self):
    """
    Test failed_pattern with buildtool set to maven, but no match in
    config.maven_jars, it should just prepend 'mvn-' to the package name.
    """
    cfg = config.Config()
    requirements = buildreq.Requirements("")
    builder = build.Build("/")
    builder.failed_pattern('line to test for failure: testpkg',
                           cfg, requirements, r'(testpkg)',
                           0,  # verbose=0
                           buildtool='maven')
    self.assertIn('mvn-testpkg', requirements.buildreqs)
    self.assertEqual(1, builder.must_restart)
def test_failed_pattern_ruby(self):
    """
    Test failed_pattern with buildtool set to ruby, but no match in
    config.gems, it should just prepend 'rubygem-' to the package name.
    """
    cfg = config.Config('')
    requirements = buildreq.Requirements("")
    builder = build.Build()
    builder.failed_pattern('line to test for failure: testpkg.rb',
                           cfg, requirements, r'(testpkg)',
                           0,  # verbose=0
                           buildtool='ruby')
    self.assertIn('rubygem-testpkg', requirements.buildreqs)
    self.assertEqual(1, builder.must_restart)
def test_failed_pattern_ruby_table_no_match(self):
    """
    Test failed_pattern with buildtool set to ruby table but no match in
    config.gems. This should not modify anything.
    """
    cfg = config.Config('')
    requirements = buildreq.Requirements("")
    builder = build.Build()
    builder.failed_pattern('line to test for failure: testpkg',
                           cfg, requirements, r'(testpkg)',
                           0,  # verbose=0
                           buildtool='ruby table')
    # No gems-table entry: nothing changes.
    self.assertEqual(set(), requirements.buildreqs)
    self.assertEqual(0, builder.must_restart)
def test_failed_pattern_R(self):
    """
    Test failed_pattern with buildtool set to R
    """
    cfg = config.Config('')
    cfg.setup_patterns()
    requirements = buildreq.Requirements("")
    builder = build.Build()
    builder.failed_pattern('line to test for failure: testpkg.r',
                           cfg, requirements, r'(testpkg)',
                           0,  # verbose=0
                           buildtool='R')
    # The R package becomes a buildreq only, not a runtime requires.
    self.assertIn('R-testpkg', requirements.buildreqs)
    self.assertNotIn('R-testpkg', requirements.requires[None])
    self.assertEqual(1, builder.must_restart)
def test_failed_pattern_ruby_table(self):
    """
    Test failed_pattern with buildtool set to ruby table and a match in
    config.gems
    """
    cfg = config.Config('')
    cfg.setup_patterns()
    requirements = buildreq.Requirements("")
    builder = build.Build()
    builder.failed_pattern('line to test for failure: test/unit',
                           cfg, requirements, r'(test/unit)',
                           0,  # verbose=0
                           buildtool='ruby table')
    # The gems table maps 'test/unit' to the rubygem-test-unit package.
    self.assertIn('rubygem-test-unit', requirements.buildreqs)
    self.assertEqual(1, builder.must_restart)
def check_spelling(_):
    """Checks that source files don't have any common misspellings."""
    logging.info('Checking for common misspellings...')
    complete = build.Build()
    # Normally we don't need to include @core, but because we look at the build
    # object directly, we need to include it here. When using main(), it will
    # call addCore which will ensure core is included.
    if not complete.parse_build(['+@complete', '+@core'], os.getcwd()):
        logging.error('Error parsing complete build')
        return False
    base = shakaBuildHelpers.get_source_base()
    # Also scan test, demo, and build sources, which the "complete" library
    # build does not cover.
    complete.include.update(
        shakaBuildHelpers.get_all_files(os.path.join(base, 'test'),
                                        re.compile(r'.*\.js$')))
    complete.include.update(
        shakaBuildHelpers.get_all_files(os.path.join(base, 'demo'),
                                        re.compile(r'.*\.js$')))
    complete.include.update(
        shakaBuildHelpers.get_all_files(os.path.join(base, 'build'),
                                        re.compile(r'.*\.(js|py)$')))
    # misspellings.txt is parsed with ast.literal_eval, so it holds a Python
    # dict literal mapping a regex to its replacement pattern.
    with shakaBuildHelpers.open_file(
            os.path.join(base, 'build', 'misspellings.txt')) as f:
        misspellings = ast.literal_eval(f.read())
    has_error = False
    for path in complete.include:
        with shakaBuildHelpers.open_file(path) as f:
            for i, line in enumerate(f):
                for regex, replace_pattern in misspellings.items():
                    for match in re.finditer(regex, line):
                        repl = match.expand(replace_pattern)
                        if match.group(0).lower() == repl:
                            continue  # No-op suggestion
                        # Print the header only once, before the first hit.
                        if not has_error:
                            logging.error(
                                'The following file(s) have misspellings:')
                        logging.error(
                            ' %s:%d:%d: Did you mean %r?' %
                            (os.path.relpath(path, base), i + 1,
                             match.start() + 1, repl))
                        has_error = True
    return not has_error