def test_unified_build_finder(self):
    """End-to-end test of UnifiedBuildFinder merging files from two build
    directories ('a' and 'b'): chrome.manifest lines are merged as sets,
    buildconfig.html bodies are concatenated with an <hr> separator,
    mismatching xpi files raise accumulated errors, and install.rdf
    targetPlatform entries from both sides are combined.
    """
    finder = UnifiedBuildFinder(FileFinder(self.tmppath('a')),
                                FileFinder(self.tmppath('b')))

    # Test chrome.manifest unification: identical manifests pass through,
    # and manifests whose lines are merely reordered unify to the sorted
    # line set from side 'a'.
    self.create_both('chrome.manifest', 'a\nb\nc\n')
    self.create_one('a', 'chrome/chrome.manifest', 'a\nb\nc\n')
    self.create_one('b', 'chrome/chrome.manifest', 'b\nc\na\n')
    self.assertEqual(sorted([
        (f, c.open().read()) for f, c in finder.find('**/chrome.manifest')
    ]), [
        ('chrome.manifest', 'a\nb\nc\n'),
        ('chrome/chrome.manifest', 'a\nb\nc\n')
    ])

    # Test buildconfig.html unification: the two bodies are expected to be
    # joined inside one <body>, separated by '<hr> </hr>'.
    self.create_one('a', 'chrome/browser/foo/buildconfig.html', '\n'.join([
        '<html>',
        '<body>',
        '<h1>about:buildconfig</h1>',
        '<div>foo</div>',
        '</body>',
        '</html>',
    ]))
    self.create_one('b', 'chrome/browser/foo/buildconfig.html', '\n'.join([
        '<html>',
        '<body>',
        '<h1>about:buildconfig</h1>',
        '<div>bar</div>',
        '</body>',
        '</html>',
    ]))
    self.assertEqual(sorted([
        (f, c.open().read()) for f, c in finder.find('**/buildconfig.html')
    ]), [
        ('chrome/browser/foo/buildconfig.html', '\n'.join([
            '<html>',
            '<body>',
            '<h1>about:buildconfig</h1>',
            '<div>foo</div>',
            '<hr> </hr>',
            '<div>bar</div>',
            '</body>',
            '</html>',
        ]))
    ])

    # Test xpi file unification: identical xpi files unify; differing ones
    # (bar.xpi below) must produce an accumulated error and be dropped from
    # the results.
    xpi = MockDest()
    with JarWriter(fileobj=xpi, compress=True) as jar:
        jar.add('foo', 'foo')
        jar.add('bar', 'bar')
    foo_xpi = xpi.read()
    self.create_both('foo.xpi', foo_xpi)

    with JarWriter(fileobj=xpi, compress=True) as jar:
        jar.add('foo', 'bar')
    self.create_one('a', 'bar.xpi', foo_xpi)
    self.create_one('b', 'bar.xpi', xpi.read())

    # Redirect error output while the expected failure accumulates, then
    # restore stderr.
    errors.out = StringIO()
    with self.assertRaises(AccumulatedErrors), errors.accumulate():
        self.assertEqual([
            (f, c.open().read()) for f, c in finder.find('*.xpi')
        ], [
            ('foo.xpi', foo_xpi)
        ])
    errors.out = sys.stderr

    # Test install.rdf unification across the namespace variants Mozilla
    # ships ('RDF:' description prefix and/or 'em:' field prefix).
    x86_64 = 'Darwin_x86_64-gcc3'
    x86 = 'Darwin_x86-gcc3'
    target_tag = '<{em}targetPlatform>{platform}</{em}targetPlatform>'
    target_attr = '{em}targetPlatform="{platform}" '

    rdf_tag = ''.join([
        '<{RDF}Description {em}bar="bar" {em}qux="qux">',
        '<{em}foo>foo</{em}foo>',
        '{targets}',
        '<{em}baz>baz</{em}baz>',
        '</{RDF}Description>'
    ])
    rdf_attr = ''.join([
        '<{RDF}Description {em}bar="bar" {attr}{em}qux="qux">',
        '{targets}',
        '<{em}foo>foo</{em}foo><{em}baz>baz</{em}baz>',
        '</{RDF}Description>'
    ])

    for descr_ns, target_ns in (('RDF:', ''), ('', 'em:'), ('RDF:', 'em:')):
        # First we need to infuse the above strings with our namespaces and
        # platform values.
        ns = {'RDF': descr_ns, 'em': target_ns}
        target_tag_x86_64 = target_tag.format(platform=x86_64, **ns)
        target_tag_x86 = target_tag.format(platform=x86, **ns)
        target_attr_x86_64 = target_attr.format(platform=x86_64, **ns)
        target_attr_x86 = target_attr.format(platform=x86, **ns)

        tag_x86_64 = rdf_tag.format(targets=target_tag_x86_64, **ns)
        tag_x86 = rdf_tag.format(targets=target_tag_x86, **ns)
        tag_merged = rdf_tag.format(
            targets=target_tag_x86_64 + target_tag_x86, **ns)
        tag_empty = rdf_tag.format(targets="", **ns)

        attr_x86_64 = rdf_attr.format(
            attr=target_attr_x86_64, targets="", **ns)
        attr_x86 = rdf_attr.format(attr=target_attr_x86, targets="", **ns)
        attr_merged = rdf_attr.format(
            attr="", targets=target_tag_x86_64 + target_tag_x86, **ns)

        # This table defines the test cases, columns "a" and "b" being the
        # contents of the install.rdf of the respective platform and
        # "result" the expected merged content after unification.
        testcases = (
            #_____a_____  _____b_____  ___result___#
            (tag_x86_64,  tag_x86,     tag_merged),
            (tag_x86_64,  tag_empty,   tag_empty),
            (tag_empty,   tag_x86,     tag_empty),
            (tag_empty,   tag_empty,   tag_empty),
            (attr_x86_64, attr_x86,    attr_merged),
            (tag_x86_64,  attr_x86,    tag_merged),
            (attr_x86_64, tag_x86,     attr_merged),
            (attr_x86_64, tag_empty,   tag_empty),
            (tag_empty,   attr_x86,    tag_empty))

        # Now create the files from the above table and compare
        results = []
        for emid, (rdf_a, rdf_b, result) in enumerate(testcases):
            filename = 'ext/id{0}/install.rdf'.format(emid)
            self.create_one('a', filename, rdf_a)
            self.create_one('b', filename, rdf_b)
            results.append((filename, result))

        self.assertEqual(sorted([
            (f, c.open().read()) for f, c in finder.find('**/install.rdf')
        ]), results)
def find_files(archive): extra_entries = [] generated_harness_files = find_generated_harness_files() if archive == 'common': # Construct entries ensuring all our generated harness files are # packaged in the common tests archive. packaged_paths = set() for entry in OBJDIR_TEST_FILES.values(): pat = mozpath.join(entry['base'], entry['pattern']) del entry['pattern'] patterns = [] for path in generated_harness_files: if mozpath.match(path, pat): patterns.append(path[len(entry['base']) + 1:]) packaged_paths.add(path) if patterns: entry['patterns'] = patterns extra_entries.append(entry) entry = { 'source': buildconfig.topobjdir, 'base': '_tests', 'patterns': [], } for path in set(generated_harness_files) - packaged_paths: entry['patterns'].append(path[len('_tests') + 1:]) extra_entries.append(entry) for entry in ARCHIVE_FILES[archive] + extra_entries: source = entry['source'] dest = entry.get('dest') base = entry.get('base', '') pattern = entry.get('pattern') patterns = entry.get('patterns', []) if pattern: patterns.append(pattern) manifest = entry.get('manifest') manifests = entry.get('manifests', []) if manifest: manifests.append(manifest) if manifests: dirs = find_manifest_dirs(os.path.join(source, base), manifests) patterns.extend({'{}/**'.format(d) for d in dirs}) ignore = list(entry.get('ignore', [])) ignore.extend([ '**/.flake8', '**/.mkdir.done', '**/*.pyc', ]) if archive not in ('common', 'updater-dep') and base.startswith('_tests'): # We may have generated_harness_files to exclude from this entry. for path in generated_harness_files: if path.startswith(base): ignore.append(path[len(base) + 1:]) common_kwargs = { 'find_dotfiles': True, 'ignore': ignore, } finder = FileFinder(os.path.join(source, base), **common_kwargs) for pattern in patterns: for p, f in finder.find(pattern): if dest: p = mozpath.join(dest, p) yield p, f
def read_from_gyp(config, path, output, vars, non_unified_sources=frozenset()):
    """Read a gyp configuration and emit GypContexts for the backend to
    process.

    config is a ConfigEnvironment, path is the path to a root gyp
    configuration file, output is the base path under which the objdir for
    the various gyp dependencies will be, and vars a dict of variables to
    pass to the gyp processor. non_unified_sources is a collection of source
    paths that must not be built unified; the default is an immutable
    frozenset() rather than the mutable set() literal the original signature
    used (the value is only read, so any set-like argument still works).

    Raises RuntimeError when the Debug/Release gyp configuration is missing
    for a target, and NotImplementedError for target types other than
    'none'/'static_library'.
    """
    time_start = time.time()
    all_sources = set()

    # gyp expects plain str instead of unicode. The frontend code gives us
    # unicode strings, so convert them.
    path = encode(path)
    str_vars = dict((name, encode(value)) for name, value in vars.items())

    params = {
        b'parallel': False,
        b'generator_flags': {},
        b'build_files': [path],
    }

    # Files that gyp_chromium always includes
    includes = [encode(mozpath.join(script_dir, 'common.gypi'))]
    finder = FileFinder(chrome_src, find_executables=False)
    includes.extend(encode(mozpath.join(chrome_src, name))
                    for name, _ in finder.find('*/supplement.gypi'))

    # Read the given gyp file and its dependencies.
    generator, flat_list, targets, data = \
        gyp.Load([path], format=b'mozbuild',
                 default_variables=str_vars,
                 includes=includes,
                 depth=encode(mozpath.dirname(path)),
                 params=params)

    # Process all targets from the given gyp files and its dependencies.
    # The path given to AllTargets needs to use os.sep, while the frontend
    # code gives us paths normalized with forward slash separator.
    for target in gyp.common.AllTargets(flat_list, targets,
                                        path.replace(b'/', os.sep)):
        build_file, target_name, toolset = \
            gyp.common.ParseQualifiedTarget(target)

        # Each target is given its own objdir. The base of that objdir
        # is derived from the relative path from the root gyp file path
        # to the current build_file, placed under the given output
        # directory. Since several targets can be in a given build_file,
        # separate them in subdirectories using the build_file basename
        # and the target_name.
        reldir = mozpath.relpath(mozpath.dirname(build_file),
                                 mozpath.dirname(path))
        subdir = '%s_%s' % (
            mozpath.splitext(mozpath.basename(build_file))[0],
            target_name,
        )

        # Emit a context for each target.
        context = GypContext(config, mozpath.relpath(
            mozpath.join(output, reldir, subdir), config.topobjdir))
        context.add_source(mozpath.abspath(build_file))
        # The list of included files returned by gyp are relative to
        # build_file
        for f in data[build_file]['included_files']:
            context.add_source(mozpath.abspath(
                mozpath.join(mozpath.dirname(build_file), f)))
        context['IS_GYP_DIR'] = True

        spec = targets[target]

        # Derive which gyp configuration to use based on MOZ_DEBUG.
        c = 'Debug' if config.substs['MOZ_DEBUG'] else 'Release'
        if c not in spec['configurations']:
            raise RuntimeError('Missing %s gyp configuration for target %s '
                               'in %s' % (c, target_name, build_file))
        target_conf = spec['configurations'][c]

        if spec['type'] == 'none':
            continue
        elif spec['type'] == 'static_library':
            # Remove leading 'lib' from the target_name if any, and use as
            # library name.
            name = spec['target_name']
            if name.startswith('lib'):
                name = name[3:]
            # The context expects an unicode string.
            context['LIBRARY_NAME'] = name.decode('utf-8')

            # gyp files contain headers and asm sources in sources lists.
            # Headers are dropped here; asm sources may never be unified.
            sources = set(mozpath.normpath(mozpath.join(context.srcdir, f))
                          for f in spec.get('sources', [])
                          if mozpath.splitext(f)[-1] != '.h')
            asm_sources = set(f for f in sources if f.endswith('.S'))

            unified_sources = sources - non_unified_sources - asm_sources
            sources -= unified_sources
            all_sources |= sources
            # The context expects alphabetical order when adding sources
            context['SOURCES'] = alphabetical_sorted(sources)
            context['UNIFIED_SOURCES'] = alphabetical_sorted(unified_sources)

            for define in target_conf.get('defines', []):
                if '=' in define:
                    name, value = define.split('=', 1)
                    context['DEFINES'][name] = value
                else:
                    context['DEFINES'][define] = True

            for include in target_conf.get('include_dirs', []):
                # moz.build expects all LOCAL_INCLUDES to exist, so ensure
                # they do.
                #
                # NB: gyp files sometimes have actual absolute paths (e.g.
                # /usr/include32) and sometimes paths that moz.build
                # considers absolute, i.e. starting from topsrcdir. There's
                # no good way to tell them apart here, and the actual
                # absolute paths are likely bogus. In any event, actual
                # absolute paths will be filtered out by trying to find them
                # in topsrcdir.
                if include.startswith('/'):
                    resolved = mozpath.abspath(
                        mozpath.join(config.topsrcdir, include[1:]))
                else:
                    resolved = mozpath.abspath(
                        mozpath.join(mozpath.dirname(build_file), include))
                if not os.path.exists(resolved):
                    continue
                context['LOCAL_INCLUDES'] += [include]

            context['EXTRA_ASSEMBLER_FLAGS'] = target_conf.get(
                'asflags_mozilla', [])
            context['EXTRA_COMPILE_FLAGS'] = target_conf.get(
                'cflags_mozilla', [])
        else:
            # Ignore other types than static_library because we don't have
            # anything using them, and we're not testing them. They can be
            # added when that becomes necessary.
            raise NotImplementedError('Unsupported gyp target type: %s' %
                                      spec['type'])

        # Add some features to all contexts. Put here in case LOCAL_INCLUDES
        # order matters.
        context['LOCAL_INCLUDES'] += [
            '/ipc/chromium/src',
            '/ipc/glue',
        ]
        context['GENERATED_INCLUDES'] += ['/ipc/ipdl/_ipdlheaders']
        # These get set via VC project file settings for normal GYP builds.
        if config.substs['OS_TARGET'] == 'WINNT':
            context['DEFINES']['UNICODE'] = True
            context['DEFINES']['_UNICODE'] = True
        context['DISABLE_STL_WRAPPING'] = True

        # Attribute the time spent since the previous yield to this context.
        context.execution_time = time.time() - time_start
        yield context
        time_start = time.time()
def finder(self): if self._finder: return self._finder self._finder = FileFinder(mozpath.normsep(self.path)) return self._finder
def consume_finished(self):
    """Emit the Visual Studio backend output: one project per static
    library, helper projects for the common build targets, launch projects
    for the application and auxiliary binaries, a shared property sheet,
    mach wrapper scripts, and a mozilla.sln solution tying them together.
    """
    out_dir = self._out_dir
    try:
        os.makedirs(out_dir)
    except OSError as e:
        # Only tolerate "directory already exists"; re-raise anything else.
        if e.errno != errno.EEXIST:
            raise

    # Maps project basename -> (project_id, basename, display name) for the
    # solution writer at the end.
    projects = {}

    for lib, path in sorted(self._libs_to_paths.items()):
        config = self._paths_to_configs.get(path, None)
        sources = self._paths_to_sources.get(path, set())
        # Rewrite each source as a normalized $(TopSrcDir)-relative path.
        sources = set(os.path.join('$(TopSrcDir)', path, s)
                      for s in sources)
        sources = set(os.path.normpath(s) for s in sources)

        finder = FileFinder(os.path.join(self.environment.topsrcdir, path),
                            find_executables=False)

        headers = [t[0] for t in finder.find('*.h')]
        headers = [os.path.normpath(os.path.join('$(TopSrcDir)',
                                                 path, f)) for f in headers]

        includes = [
            os.path.join('$(TopSrcDir)', path),
            os.path.join('$(TopObjDir)', path),
        ]
        includes.extend(self._paths_to_includes.get(path, []))
        includes.append('$(TopObjDir)\\dist\\include\\nss')
        includes.append('$(TopObjDir)\\dist\\include')

        # Harvest -I flags from the per-library CFLAGS substitutions, if a
        # config is known for this path.
        for v in ('NSPR_CFLAGS', 'NSS_CFLAGS', 'MOZ_JPEG_CFLAGS',
                  'MOZ_PNG_CFLAGS', 'MOZ_ZLIB_CFLAGS', 'MOZ_PIXMAN_CFLAGS'):
            if not config:
                break

            args = config.substs.get(v, '').split()
            for i, arg in enumerate(args):
                if arg.startswith('-I'):
                    includes.append(os.path.normpath(arg[2:]))

        # Pull in system defaults.
        includes.append('$(DefaultIncludes)')

        includes = [os.path.normpath(i) for i in includes]

        # Render the DEFINES dict as NAME or NAME=VALUE strings.
        defines = []
        for k, v in self._paths_to_defines.get(path, {}).items():
            if v is True:
                defines.append(k)
            else:
                defines.append('%s=%s' % (k, v))

        basename = 'library_%s' % lib
        project_id = self._write_vs_project(
            out_dir, basename, lib,
            includes=includes,
            forced_includes=[
                '$(TopObjDir)\\dist\\include\\mozilla-config.h'
            ],
            defines=defines,
            headers=headers,
            sources=sources)

        projects[basename] = (project_id, basename, lib)

    # Generate projects that can be used to build common targets.
    for target in ('export', 'binaries', 'tools', 'full'):
        basename = 'target_%s' % target
        command = '$(SolutionDir)\\mach.bat build'
        if target != 'full':
            command += ' %s' % target

        project_id = self._write_vs_project(
            out_dir, basename, target,
            build_command=command,
            clean_command='$(SolutionDir)\\mach.bat build clean')

        projects[basename] = (project_id, basename, target)

    # A project that can be used to regenerate the visual studio projects.
    basename = 'target_vs'
    project_id = self._write_vs_project(
        out_dir, basename, 'visual-studio',
        build_command='$(SolutionDir)\\mach.bat build-backend -b VisualStudio')
    projects[basename] = (project_id, basename, 'visual-studio')

    # A project to run the main application binary.
    app_name = self.environment.substs['MOZ_APP_NAME']
    basename = 'binary_%s' % app_name
    project_id = self._write_vs_project(
        out_dir, basename, app_name,
        debugger=('$(TopObjDir)\\dist\\bin\\%s.exe' % app_name,
                  '-no-remote'))
    projects[basename] = (project_id, basename, app_name)

    # Projects to run other common binaries.
    for app in ['js', 'xpcshell']:
        basename = 'binary_%s' % app
        project_id = self._write_vs_project(
            out_dir, basename, app,
            debugger=('$(TopObjDir)\\dist\\bin\\%s.exe' % app, ''))
        projects[basename] = (project_id, basename, app)

    # Write out a shared property file with common variables.
    props_path = os.path.join(out_dir, 'mozilla.props')
    with open(props_path, 'wb') as fh:
        self._write_props(fh)

    # Generate some wrapper scripts that allow us to invoke mach inside
    # a MozillaBuild-like environment. We currently only use the batch
    # script. We'd like to use the PowerShell script. However, it seems
    # to buffer output from within Visual Studio (surely this is
    # configurable) and the default execution policy of PowerShell doesn't
    # allow custom scripts to be executed.
    with open(os.path.join(out_dir, 'mach.bat'), 'wb') as fh:
        self._write_mach_batch(fh)

    with open(os.path.join(out_dir, 'mach.ps1'), 'wb') as fh:
        self._write_mach_powershell(fh)

    # Write out a solution file to tie it all together.
    solution_path = os.path.join(out_dir, 'mozilla.sln')
    with open(solution_path, 'wb') as fh:
        self._write_solution(fh, projects)
def _process_test_manifest(self, sandbox, info, manifest_path):
    """Parse one test manifest and yield a TestManifest object describing
    its tests and every file that must be installed alongside them.

    sandbox -- moz.build sandbox supplying SRCDIR/TOPSRCDIR.
    info -- (flavor, install_prefix, filter_inactive) triple for this
        manifest flavor.
    manifest_path -- manifest location relative to SRCDIR.

    Raises SandboxValidationError for empty manifests, support-files
    wildcards with no matches, generated-files entries not present in the
    install set, or any underlying parse error.
    """
    flavor, install_prefix, filter_inactive = info

    manifest_path = os.path.normpath(manifest_path)
    path = mozpath.normpath(mozpath.join(sandbox['SRCDIR'], manifest_path))
    manifest_dir = mozpath.dirname(path)
    manifest_reldir = mozpath.dirname(mozpath.relpath(path,
                                                      sandbox['TOPSRCDIR']))

    try:
        m = manifestparser.TestManifest(manifests=[path], strict=True)

        if not m.tests:
            raise SandboxValidationError('Empty test manifest: %s' % path)

        obj = TestManifest(sandbox, path, m, flavor=flavor,
                           install_prefix=install_prefix,
                           relpath=mozpath.join(manifest_reldir,
                                                mozpath.basename(path)),
                           dupe_manifest='dupe-manifest' in m.tests[0])

        filtered = m.tests

        if filter_inactive:
            filtered = m.active_tests(disabled=False, **self.mozinfo)

        out_dir = mozpath.join(install_prefix, manifest_reldir)

        finder = FileFinder(base=manifest_dir, find_executables=False)

        # "head" and "tail" lists.
        # All manifests support support-files.
        #
        # Keep a set of already seen support file patterns, because
        # repeatedly processing the patterns from the default section
        # for every test is quite costly (see bug 922517).
        extras = (('head', set()),
                  ('tail', set()),
                  ('support-files', set()))

        for test in filtered:
            obj.tests.append(test)

            # Map the test's source path to its install destination.
            obj.installs[mozpath.normpath(test['path'])] = \
                mozpath.join(out_dir, test['relpath'])

            for thing, seen in extras:
                value = test.get(thing, '')
                if value in seen:
                    continue
                seen.add(value)
                for pattern in value.split():
                    # We only support globbing on support-files because
                    # the harness doesn't support * for head and tail.
                    #
                    # While we could feed everything through the finder, we
                    # don't because we want explicitly listed files that
                    # no longer exist to raise an error. The finder is also
                    # slower than simple lookup.
                    if '*' in pattern and thing == 'support-files':
                        paths = [f[0] for f in finder.find(pattern)]
                        if not paths:
                            raise SandboxValidationError(
                                '%s support-files '
                                'wildcard in %s returns no results.'
                                % (pattern, path))

                        for f in paths:
                            full = mozpath.normpath(
                                mozpath.join(manifest_dir, f))
                            obj.installs[full] = mozpath.join(out_dir, f)

                    else:
                        full = mozpath.normpath(
                            mozpath.join(manifest_dir, pattern))
                        # Only install paths in our directory. This
                        # rule is somewhat arbitrary and could be lifted.
                        if not full.startswith(manifest_dir):
                            continue

                        obj.installs[full] = mozpath.join(out_dir, pattern)

        # We also copy the manifest into the output directory.
        out_path = mozpath.join(out_dir, os.path.basename(manifest_path))
        obj.installs[path] = out_path

        # Some manifests reference files that are auto generated as
        # part of the build or shouldn't be installed for some
        # reason. Here, we prune those files from the install set.
        # FUTURE we should be able to detect autogenerated files from
        # other build metadata. Once we do that, we can get rid of this.
        for f in m.tests[0].get('generated-files', '').split():
            # We re-raise otherwise the stack trace isn't informative.
            try:
                del obj.installs[mozpath.join(manifest_dir, f)]
            except KeyError:
                raise SandboxValidationError(
                    'Error processing test '
                    'manifest %s: entry in generated-files not present '
                    'elsewhere in manifest: %s' % (path, f))

            obj.external_installs.add(mozpath.join(out_dir, f))

        yield obj
    except (AssertionError, Exception):
        # NOTE(review): AssertionError already subclasses Exception, so this
        # tuple is equivalent to catching Exception alone.
        raise SandboxValidationError(
            'Error processing test '
            'manifest file %s: %s'
            % (path, '\n'.join(traceback.format_exception(*sys.exc_info()))))
def _write_projects_for_sources(self, sources, prefix, out_dir): projects = {} for item, path in sorted(sources.items()): config = self._paths_to_configs.get(path, None) sources = self._paths_to_sources.get(path, set()) sources = set( os.path.join('$(TopSrcDir)', path, s) for s in sources) sources = set(os.path.normpath(s) for s in sources) finder = FileFinder(os.path.join(self.environment.topsrcdir, path)) headers = [t[0] for t in finder.find('*.h')] headers = [ os.path.normpath(os.path.join('$(TopSrcDir)', path, f)) for f in headers ] includes = [ os.path.join('$(TopSrcDir)', path), os.path.join('$(TopObjDir)', path), ] includes.extend(self._paths_to_includes.get(path, [])) includes.append('$(TopObjDir)\\dist\\include\\nss') includes.append('$(TopObjDir)\\dist\\include') for v in ('NSPR_CFLAGS', 'NSS_CFLAGS', 'MOZ_JPEG_CFLAGS', 'MOZ_PNG_CFLAGS', 'MOZ_ZLIB_CFLAGS', 'MOZ_PIXMAN_CFLAGS'): if not config: break args = config.substs.get(v, []) for i, arg in enumerate(args): if arg.startswith('-I'): includes.append(os.path.normpath(arg[2:])) # Pull in system defaults. includes.append('$(DefaultIncludes)') includes = [os.path.normpath(i) for i in includes] defines = [] for k, v in self._paths_to_defines.get(path, {}).items(): if v is True: defines.append(k) else: defines.append('%s=%s' % (k, v)) debugger = None if prefix == 'binary': if item.startswith(self.environment.substs['MOZ_APP_NAME']): debugger = ('$(TopObjDir)\\dist\\bin\\%s' % item, '-no-remote') else: debugger = ('$(TopObjDir)\\dist\\bin\\%s' % item, '') basename = '%s_%s' % (prefix, item) project_id = self._write_vs_project( out_dir, basename, item, includes=includes, forced_includes=[ '$(TopObjDir)\\dist\\include\\mozilla-config.h' ], defines=defines, headers=headers, sources=sources, debugger=debugger) projects[basename] = (project_id, basename, item) return projects
def main():
    """Packager command-line entry point: parse arguments, build the
    formatter/finder pipeline (optionally unifying two builds), process the
    package manifest, sign libraries, set up jar preloading and the startup
    cache, then copy the result to the destination directory."""
    parser = ArgumentParser()
    parser.add_argument('-D', dest='defines', action='append',
                        metavar="VAR[=VAL]", help='Define a variable')
    parser.add_argument('--format', default='omni',
                        help='Choose the chrome format for packaging ' +
                        '(omni, jar or flat ; default: %(default)s)')
    parser.add_argument('--removals', default=None,
                        help='removed-files source file')
    parser.add_argument('--ignore-errors', action='store_true', default=False,
                        help='Transform errors into warnings.')
    parser.add_argument('--minify', action='store_true', default=False,
                        help='Make some files more compact while packaging')
    parser.add_argument('--minify-js', action='store_true',
                        help='Minify JavaScript files while packaging.')
    parser.add_argument('--js-binary',
                        help='Path to js binary. This is used to verify '
                        'minified JavaScript. If this is not defined, '
                        'minification verification will not be performed.')
    parser.add_argument('--jarlog', default='', help='File containing jar ' +
                        'access logs')
    parser.add_argument('--optimizejars', action='store_true', default=False,
                        help='Enable jar optimizations')
    parser.add_argument('--unify', default='',
                        help='Base directory of another build to unify with')
    parser.add_argument('manifest', default=None, nargs='?',
                        help='Manifest file name')
    parser.add_argument('source', help='Source directory')
    parser.add_argument('destination', help='Destination directory')
    parser.add_argument('--non-resource', nargs='+', metavar='PATTERN',
                        default=[],
                        help='Extra files not to be considered as resources')
    args = parser.parse_args()

    defines = dict(buildconfig.defines)
    if args.ignore_errors:
        errors.ignore_errors()

    if args.defines:
        for name, value in [split_define(d) for d in args.defines]:
            defines[name] = value

    copier = FileCopier()
    if args.format == 'flat':
        formatter = FlatFormatter(copier)
    elif args.format == 'jar':
        formatter = JarFormatter(copier, optimize=args.optimizejars)
    elif args.format == 'omni':
        formatter = OmniJarFormatter(copier,
                                     buildconfig.substs['OMNIJAR_NAME'],
                                     optimize=args.optimizejars,
                                     non_resources=args.non_resource)
    else:
        errors.fatal('Unknown format: %s' % args.format)

    # Adjust defines according to the requested format.
    if isinstance(formatter, OmniJarFormatter):
        defines['MOZ_OMNIJAR'] = 1
    elif 'MOZ_OMNIJAR' in defines:
        del defines['MOZ_OMNIJAR']

    # Strip any leading slashes from the resources base path.
    respath = ''
    if 'RESPATH' in defines:
        respath = SimpleManifestSink.normalize_path(defines['RESPATH'])
    while respath.startswith('/'):
        respath = respath[1:]

    if args.unify:
        def is_native(path):
            # True when the running machine's architecture appears as a
            # path component.
            path = os.path.abspath(path)
            return platform.machine() in mozpath.split(path)

        # Invert args.unify and args.source if args.unify points to the
        # native architecture.
        args.source, args.unify = sorted([args.source, args.unify],
                                         key=is_native, reverse=True)
        if is_native(args.source):
            launcher.tooldir = args.source
    elif not buildconfig.substs['CROSS_COMPILE']:
        launcher.tooldir = buildconfig.substs['LIBXUL_DIST']

    with errors.accumulate():
        finder_args = dict(
            minify=args.minify,
            minify_js=args.minify_js,
        )
        if args.js_binary:
            finder_args['minify_js_verify_command'] = [
                args.js_binary,
                os.path.join(os.path.abspath(os.path.dirname(__file__)),
                             'js-compare-ast.js')
            ]
        if args.unify:
            finder = UnifiedBuildFinder(FileFinder(args.source),
                                        FileFinder(args.unify),
                                        **finder_args)
        else:
            finder = FileFinder(args.source, **finder_args)
        if 'NO_PKG_FILES' in os.environ:
            sinkformatter = NoPkgFilesRemover(formatter,
                                              args.manifest is not None)
        else:
            sinkformatter = formatter
        sink = SimpleManifestSink(finder, sinkformatter)
        if args.manifest:
            preprocess_manifest(sink, args.manifest, defines)
        else:
            # No manifest given: package everything under bin/.
            sink.add(Component(''), 'bin/*')
        sink.close(args.manifest is not None)

        if args.removals:
            removals_in = StringIO(open(args.removals).read())
            removals_in.name = args.removals
            removals = RemovedFiles(copier)
            preprocess(removals_in, removals, defines)
            copier.add(mozpath.join(respath, 'removed-files'), removals)

    # shlibsign libraries
    if launcher.can_launch():
        if not mozinfo.isMac:
            for lib in SIGN_LIBS:
                libbase = mozpath.join(respath, '%s%s') \
                    % (buildconfig.substs['DLL_PREFIX'], lib)
                libname = '%s%s' % (libbase,
                                    buildconfig.substs['DLL_SUFFIX'])
                if copier.contains(libname):
                    copier.add(
                        libbase + '.chk',
                        LibSignFile(os.path.join(args.destination,
                                                 libname)))

    # Setup preloading
    if args.jarlog and os.path.exists(args.jarlog):
        from mozpack.mozjar import JarLog
        log = JarLog(args.jarlog)
        for p, f in copier:
            if not isinstance(f, Jarrer):
                continue
            key = JarLog.canonicalize(os.path.join(args.destination, p))
            if key in log:
                f.preload(log[key])

    # Fill startup cache
    if isinstance(formatter, OmniJarFormatter) and launcher.can_launch() \
            and buildconfig.substs['MOZ_DISABLE_STARTUPCACHE'] != '1':
        if buildconfig.substs.get('LIBXUL_SDK'):
            gre_path = mozpath.join(buildconfig.substs['LIBXUL_DIST'],
                                    'bin')
        else:
            gre_path = None

        def get_bases():
            # Yield each package base, preferring its bin/ variant when that
            # directory exists in the source.
            for b in sink.packager.get_bases(addons=False):
                for p in (mozpath.join('bin', b), b):
                    if os.path.exists(os.path.join(args.source, p)):
                        yield p
                        break

        for base in sorted(get_bases()):
            if not gre_path:
                gre_path = base
            base_path = sink.normalize_path(base)
            if base_path in formatter.omnijars:
                precompile_cache(formatter.omnijars[base_path],
                                 args.source, gre_path, base)

    copier.copy(args.destination)
def validate(self, config):
    """Compare every file under the objdir's dist directory against the
    exact set of paths and (preprocessed) contents this test build is
    expected to produce."""
    self.maxDiff = None
    test_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                             'data', 'build') + os.sep

    # We want unicode instances out of the files, because having plain str
    # makes assertEqual diff output in case of error extra verbose because
    # of the difference in type.
    result = {
        p: f.open().read().decode('utf-8')
        for p, f in FileFinder(mozpath.join(config.topobjdir, 'dist'))
    }
    self.assertTrue(len(result))
    self.assertEqual(result, {
        'bin/baz.ini': 'baz.ini: FOO is foo\n',
        'bin/child/bar.ini': 'bar.ini\n',
        'bin/child2/foo.css': 'foo.css: FOO is foo\n',
        'bin/child2/qux.ini': 'qux.ini: BAR is not defined\n',
        'bin/chrome.manifest':
            'manifest chrome/foo.manifest\n'
            'manifest components/components.manifest\n',
        'bin/chrome/foo.manifest':
            'content bar foo/child/\n'
            'content foo foo/\n'
            'override chrome://foo/bar.svg#hello '
            'chrome://bar/bar.svg#hello\n',
        'bin/chrome/foo/bar.js': 'bar.js\n',
        'bin/chrome/foo/child/baz.jsm':
            '//@line 2 "%sbaz.jsm"\nbaz.jsm: FOO is foo\n' % (test_path),
        'bin/chrome/foo/child/hoge.js':
            '//@line 2 "%sbar.js"\nbar.js: FOO is foo\n' % (test_path),
        'bin/chrome/foo/foo.css': 'foo.css: FOO is foo\n',
        'bin/chrome/foo/foo.js': 'foo.js\n',
        'bin/chrome/foo/qux.js': 'bar.js\n',
        'bin/components/bar.js':
            '//@line 2 "%sbar.js"\nbar.js: FOO is foo\n' % (test_path),
        'bin/components/components.manifest':
            'component {foo} foo.js\ncomponent {bar} bar.js\n',
        'bin/components/foo.js': 'foo.js\n',
        'bin/defaults/pref/prefs.js': 'prefs.js\n',
        'bin/foo.ini': 'foo.ini\n',
        'bin/modules/baz.jsm':
            '//@line 2 "%sbaz.jsm"\nbaz.jsm: FOO is foo\n' % (test_path),
        'bin/modules/child/bar.jsm': 'bar.jsm\n',
        'bin/modules/child2/qux.jsm':
            '//@line 4 "%squx.jsm"\nqux.jsm: BAR is not defined\n'
            % (test_path),
        'bin/modules/foo.jsm': 'foo.jsm\n',
        'bin/res/resource': 'resource\n',
        'bin/res/child/resource2': 'resource2\n',

        # The app/ subtree is preprocessed with FOO=bar and BAR defined.
        'bin/app/baz.ini': 'baz.ini: FOO is bar\n',
        'bin/app/child/bar.ini': 'bar.ini\n',
        'bin/app/child2/qux.ini': 'qux.ini: BAR is defined\n',
        'bin/app/chrome.manifest':
            'manifest chrome/foo.manifest\n'
            'manifest components/components.manifest\n',
        'bin/app/chrome/foo.manifest':
            'content bar foo/child/\n'
            'content foo foo/\n'
            'override chrome://foo/bar.svg#hello '
            'chrome://bar/bar.svg#hello\n',
        'bin/app/chrome/foo/bar.js': 'bar.js\n',
        'bin/app/chrome/foo/child/baz.jsm':
            '//@line 2 "%sbaz.jsm"\nbaz.jsm: FOO is bar\n' % (test_path),
        'bin/app/chrome/foo/child/hoge.js':
            '//@line 2 "%sbar.js"\nbar.js: FOO is bar\n' % (test_path),
        'bin/app/chrome/foo/foo.css': 'foo.css: FOO is bar\n',
        'bin/app/chrome/foo/foo.js': 'foo.js\n',
        'bin/app/chrome/foo/qux.js': 'bar.js\n',
        'bin/app/components/bar.js':
            '//@line 2 "%sbar.js"\nbar.js: FOO is bar\n' % (test_path),
        'bin/app/components/components.manifest':
            'component {foo} foo.js\ncomponent {bar} bar.js\n',
        'bin/app/components/foo.js': 'foo.js\n',
        'bin/app/defaults/preferences/prefs.js': 'prefs.js\n',
        'bin/app/foo.css': 'foo.css: FOO is bar\n',
        'bin/app/foo.ini': 'foo.ini\n',
        'bin/app/modules/baz.jsm':
            '//@line 2 "%sbaz.jsm"\nbaz.jsm: FOO is bar\n' % (test_path),
        'bin/app/modules/child/bar.jsm': 'bar.jsm\n',
        'bin/app/modules/child2/qux.jsm':
            '//@line 2 "%squx.jsm"\nqux.jsm: BAR is defined\n'
            % (test_path),
        'bin/app/modules/foo.jsm': 'foo.jsm\n',
    })
def make_archive(archive_name, base, exclude, include):
    """Create an archive at ``archive_name`` from the files under ``base``.

    archive_name -- output path; the extension selects the format
        (.zip via JarWriter, .tar.zst via an external zstd subprocess).
    base -- directory the files are collected from.
    exclude -- ignore patterns passed to the FileFinder.
    include -- match patterns; empty means everything ('*').

    In the zip case only '**/*.sym' entries are compressed. Raises for
    unsupported archive extensions.
    """
    compress = ['**/*.sym']
    finder = FileFinder(base, ignore=exclude)
    if not include:
        include = ['*']
    archive_basename = os.path.basename(archive_name)

    def fill_archive(add_file):
        # Feed every matched (path, file) pair to the format-specific
        # writer callback.
        for pat in include:
            for p, f in finder.find(pat):
                print(' Adding to "%s":\n\t"%s"' % (archive_basename, p))
                add_file(p, f)

    with open(archive_name, 'wb') as fh:
        if archive_basename.endswith('.zip'):
            from mozpack.mozjar import JarWriter
            with JarWriter(fileobj=fh, compress_level=5) as writer:
                def add_file(p, f):
                    should_compress = any(mozpath.match(p, pat)
                                          for pat in compress)
                    writer.add(p.encode('utf-8'), f, mode=f.mode,
                               compress=should_compress,
                               skip_duplicates=True)
                fill_archive(add_file)
        elif archive_basename.endswith('.tar.zst'):
            import mozfile
            import subprocess
            import tarfile
            from buildconfig import topsrcdir

            # Ideally, we'd do this:
            # import zstandard
            # ctx = zstandard.ZstdCompressor(threads=-1)
            # zstdwriter = ctx.stream_writer(fh)
            # and use `zstdwriter` as the fileobj input for `tarfile.open`.
            # But this script is invoked with `PYTHON3` in a Makefile, which
            # uses the virtualenv python where zstandard is not installed.
            # Both `sys.executable` and `buildconfig.substs['PYTHON3']`
            # would be the same. Instead, search within PATH to find another
            # python3, which hopefully has zstandard available (and that's
            # the case on automation, where this code path is expected to be
            # followed).
            python_path = mozpath.normpath(os.path.dirname(sys.executable))
            path = [
                p for p in os.environ['PATH'].split(os.pathsep)
                if mozpath.normpath(p) != python_path
            ]
            python3 = mozfile.which('python3', path=path)
            # NOTE(review): `which` can presumably return None when no other
            # python3 is on PATH, which would fail below with an unhelpful
            # error -- confirm whether that can happen outside automation.
            proc = subprocess.Popen([
                python3,
                os.path.join(topsrcdir, 'taskcluster', 'scripts', 'misc',
                             'zstdpy'),
                '-T0',
            ], stdin=subprocess.PIPE, stdout=fh)

            # Stream the tar into the compressor's stdin ('w|' = non-seekable
            # stream mode).
            with tarfile.open(mode='w|', fileobj=proc.stdin,
                              bufsize=1024 * 1024) as tar:
                def add_file(p, f):
                    info = tar.gettarinfo(os.path.join(base, p), p)
                    tar.addfile(info, f.open())
                fill_archive(add_file)
            proc.stdin.close()
            # NOTE(review): the compressor's exit status is not checked, so
            # a zstd failure would go unnoticed here.
            proc.wait()
        else:
            raise Exception(
                'Unsupported archive format for {}'.format(archive_basename))
to what you'd see elsewhere in Python. Arguments to built-in exceptions like KeyError are machine parseable. This machine-friendly data is used to present user-friendly error messages in the case of errors. """ from __future__ import absolute_import, unicode_literals import os import sys import weakref from mozbuild.util import ReadOnlyDict from .context import Context from mozpack.files import FileFinder default_finder = FileFinder('/', find_executables=False) def alphabetical_sorted(iterable, cmp=None, key=lambda x: x.lower(), reverse=False): """sorted() replacement for the sandbox, ordering alphabetically by default. """ return sorted(iterable, cmp, key, reverse) class SandboxError(Exception): def __init__(self, file_stack): self.file_stack = file_stack
def _parse_android_test_results(config, topsrcdir=None, report_dir=None):
    """Yield lint-style results parsed from JUnit XML reports in report_dir.

    For every ``<error>`` or ``<failure>`` element in each ``TEST-*.xml``
    report, locates the corresponding Java/Kotlin source file under
    mobile/android and extracts the failing line number from the exception
    message.  Raises RuntimeError when no report, no source file, or no
    source line can be found.
    """
    # A brute force way to turn a Java FQN into a path on disk. Assumes Java
    # and Kotlin sources are in mobile/android for performance and simplicity.
    sourcepath_finder = FileFinder(os.path.join(topsrcdir, "mobile", "android"))

    finder = FileFinder(report_dir)
    reports = list(finder.find("TEST-*.xml"))
    if not reports:
        raise RuntimeError("No reports found under {}".format(report_dir))

    for report, _ in reports:
        tree = ET.parse(open(os.path.join(finder.base, report), "rt"))
        root = tree.getroot()

        class_name = root.get(
            "name"
        )  # Like 'org.mozilla.gecko.permissions.TestPermissions'.
        path = (
            "**/" + class_name.replace(".", "/") + ".*"
        )  # Like '**/org/mozilla/gecko/permissions/TestPermissions.*'.  # NOQA: E501

        for testcase in root.findall("testcase"):
            function_name = testcase.get("name")

            # Schema cribbed from http://llg.cubic.org/docs/junit/.
            for unexpected in itertools.chain(
                testcase.findall("error"), testcase.findall("failure")
            ):
                sourcepaths = list(sourcepath_finder.find(path))
                if not sourcepaths:
                    raise RuntimeError(
                        "No sourcepath found for class {class_name}".format(
                            class_name=class_name
                        )
                    )

                for sourcepath, _ in sourcepaths:
                    lineno = 0
                    message = unexpected.get("message")
                    # Turn '... at org.mozilla.gecko.permissions.TestPermissions.testMultipleRequestsAreQueuedAndDispatchedSequentially(TestPermissions.java:118)' into 118.  # NOQA: E501
                    pattern = r"at {class_name}\.{function_name}\(.*:(\d+)\)"
                    pattern = pattern.format(
                        class_name=class_name, function_name=function_name
                    )
                    match = re.search(pattern, message)
                    if match:
                        lineno = int(match.group(1))
                    else:
                        msg = "No source line found for {class_name}.{function_name}".format(
                            class_name=class_name, function_name=function_name
                        )
                        raise RuntimeError(msg)

                    err = {
                        "level": "error",
                        "rule": unexpected.get("type"),
                        "message": message,
                        "path": os.path.join("mobile", "android", sourcepath),
                        "lineno": lineno,
                    }
                    yield result.from_config(config, **err)
def android_test(self, args):
    """Run the Gradle Android unit-test tasks and report results.

    Visits the JUnit XML reports produced by Gradle, prints
    TEST-*/SUITE-* lines for the log parser, and links the HTML report
    for humans.  Returns the accumulated exit code (gradle's result
    OR'd with 1 on any test failure).
    """
    ret = self.gradle(
        self.substs['GRADLE_ANDROID_TEST_TASKS'] + ["--continue"] + args,
        verbose=True)

    # Findbug produces both HTML and XML reports.  Visit the
    # XML report(s) to report errors and link to the HTML
    # report(s) for human consumption.
    import itertools
    import xml.etree.ElementTree as ET

    from mozpack.files import (
        FileFinder,
    )

    root_url = self._root_url(
        artifactdir='public/android/unittest',
        objdir='gradle/build/mobile/android/app/reports/tests')

    reports = (self.substs['GRADLE_ANDROID_APP_VARIANT_NAME'], )
    for report in reports:
        finder = FileFinder(
            os.path.join(self.topobjdir,
                         'gradle/build/mobile/android/app/test-results/',
                         report))
        for p, _ in finder.find('TEST-*.xml'):
            f = open(os.path.join(finder.base, p), 'rt')
            tree = ET.parse(f)
            root = tree.getroot()

            # Log reports for Tree Herder "Job Details".
            print(
                'TinderboxPrint: report<br/><a href="{}/{}/index.html">HTML {} report</a>, visit "Inspect Task" link for details'
                .format(root_url, report, report))

            # And make the report display as soon as possible.
            failed = root.findall('testcase/error') or root.findall(
                'testcase/failure')
            if failed:
                print(
                    'TEST-UNEXPECTED-FAIL | android-test | There were failing tests. See the reports at: {}/{}/index.html'
                    .format(root_url, report))

            print('SUITE-START | android-test | {} {}'.format(
                report, root.get('name')))

            for testcase in root.findall('testcase'):
                name = testcase.get('name')
                print('TEST-START | {}'.format(name))

                # Schema cribbed from
                # http://llg.cubic.org/docs/junit/.  There's no
                # particular advantage to formatting the error, so
                # for now let's just output the unexpected XML
                # tag.
                error_count = 0
                for unexpected in itertools.chain(
                        testcase.findall('error'),
                        testcase.findall('failure')):
                    for line in ET.tostring(
                            unexpected).strip().splitlines():
                        print('TEST-UNEXPECTED-FAIL | {} | {}'.format(
                            name, line))
                    error_count += 1
                    ret |= 1

                # Skipped tests aren't unexpected at this time; we
                # disable some tests that require live remote
                # endpoints.
                for skipped in testcase.findall('skipped'):
                    for line in ET.tostring(skipped).strip().splitlines():
                        print('TEST-INFO | {} | {}'.format(name, line))

                if not error_count:
                    print('TEST-PASS | {}'.format(name))

            print('SUITE-END | android-test | {} {}'.format(
                report, root.get('name')))
    return ret
def validate(self, config):
    """Compare every file under ``dist`` in the objdir against the exact
    expected path -> content mapping for the test build data.

    ``test_path`` is the $SRCDIR-relative prefix that preprocessed files
    embed in their ``//@line`` markers.
    """
    self.maxDiff = None
    test_path = os.sep.join(('$SRCDIR', 'python', 'mozbuild', 'mozbuild',
                             'test', 'backend', 'data', 'build')) + os.sep

    result = {
        p: f.open(mode='r').read()
        for p, f in FileFinder(mozpath.join(config.topobjdir, 'dist'))
    }
    self.assertTrue(len(result))
    self.assertEqual(result, {
        'bin/baz.ini': 'baz.ini: FOO is foo\n',
        'bin/child/bar.ini': 'bar.ini\n',
        'bin/child2/foo.css': 'foo.css: FOO is foo\n',
        'bin/child2/qux.ini': 'qux.ini: BAR is not defined\n',
        'bin/chrome.manifest':
            'manifest chrome/foo.manifest\n'
            'manifest components/components.manifest\n',
        'bin/chrome/foo.manifest':
            'content bar foo/child/\n'
            'content foo foo/\n'
            'override chrome://foo/bar.svg#hello '
            'chrome://bar/bar.svg#hello\n',
        'bin/chrome/foo/bar.js': 'bar.js\n',
        'bin/chrome/foo/child/baz.jsm':
            '//@line 2 "%sbaz.jsm"\nbaz.jsm: FOO is foo\n' % (test_path),
        'bin/chrome/foo/child/hoge.js':
            '//@line 2 "%sbar.js"\nbar.js: FOO is foo\n' % (test_path),
        'bin/chrome/foo/foo.css': 'foo.css: FOO is foo\n',
        'bin/chrome/foo/foo.js': 'foo.js\n',
        'bin/chrome/foo/qux.js': 'bar.js\n',
        'bin/components/bar.js':
            '//@line 2 "%sbar.js"\nbar.js: FOO is foo\n' % (test_path),
        'bin/components/components.manifest':
            'component {foo} foo.js\ncomponent {bar} bar.js\n',
        'bin/components/foo.js': 'foo.js\n',
        'bin/defaults/pref/prefs.js': 'prefs.js\n',
        'bin/foo.ini': 'foo.ini\n',
        'bin/modules/baz.jsm':
            '//@line 2 "%sbaz.jsm"\nbaz.jsm: FOO is foo\n' % (test_path),
        'bin/modules/child/bar.jsm': 'bar.jsm\n',
        'bin/modules/child2/qux.jsm':
            '//@line 4 "%squx.jsm"\nqux.jsm: BAR is not defined\n'
            % (test_path),
        'bin/modules/foo.jsm': 'foo.jsm\n',
        'bin/res/resource': 'resource\n',
        'bin/res/child/resource2': 'resource2\n',

        'bin/app/baz.ini': 'baz.ini: FOO is bar\n',
        'bin/app/child/bar.ini': 'bar.ini\n',
        'bin/app/child2/qux.ini': 'qux.ini: BAR is defined\n',
        'bin/app/chrome.manifest':
            'manifest chrome/foo.manifest\n'
            'manifest components/components.manifest\n',
        'bin/app/chrome/foo.manifest':
            'content bar foo/child/\n'
            'content foo foo/\n'
            'override chrome://foo/bar.svg#hello '
            'chrome://bar/bar.svg#hello\n',
        'bin/app/chrome/foo/bar.js': 'bar.js\n',
        'bin/app/chrome/foo/child/baz.jsm':
            '//@line 2 "%sbaz.jsm"\nbaz.jsm: FOO is bar\n' % (test_path),
        'bin/app/chrome/foo/child/hoge.js':
            '//@line 2 "%sbar.js"\nbar.js: FOO is bar\n' % (test_path),
        'bin/app/chrome/foo/foo.css': 'foo.css: FOO is bar\n',
        'bin/app/chrome/foo/foo.js': 'foo.js\n',
        'bin/app/chrome/foo/qux.js': 'bar.js\n',
        'bin/app/components/bar.js':
            '//@line 2 "%sbar.js"\nbar.js: FOO is bar\n' % (test_path),
        'bin/app/components/components.manifest':
            'component {foo} foo.js\ncomponent {bar} bar.js\n',
        'bin/app/components/foo.js': 'foo.js\n',
        'bin/app/defaults/preferences/prefs.js': 'prefs.js\n',
        'bin/app/foo.css': 'foo.css: FOO is bar\n',
        'bin/app/foo.ini': 'foo.ini\n',
        'bin/app/modules/baz.jsm':
            '//@line 2 "%sbaz.jsm"\nbaz.jsm: FOO is bar\n' % (test_path),
        'bin/app/modules/child/bar.jsm': 'bar.jsm\n',
        'bin/app/modules/child2/qux.jsm':
            '//@line 2 "%squx.jsm"\nqux.jsm: BAR is defined\n'
            % (test_path),
        'bin/app/modules/foo.jsm': 'foo.jsm\n',
    })
def _process_android_eclipse_project_data(self, data, srcdir, objdir):
    """Generate an Android Eclipse project directory from ``data``.

    Writes the project manifest, builds classpath entry XML, fills a
    defines dict used to preprocess the project templates, and copies the
    preprocessed templates into the project directory (removing anything
    not accounted for).
    """
    # This can't be relative to the environment's topsrcdir,
    # because during testing topsrcdir is faked.
    template_directory = os.path.abspath(
        mozpath.join(os.path.dirname(__file__), 'templates',
                     'android_eclipse'))

    project_directory = mozpath.join(self.environment.topobjdir,
                                     'android_eclipse', data.name)
    manifest_path = mozpath.join(self.environment.topobjdir,
                                 'android_eclipse',
                                 '%s.manifest' % data.name)

    manifest = self._manifest_for_project(srcdir, data)
    ensureParentDir(manifest_path)
    manifest.write(path=manifest_path)

    # Serialize classpath entries: source entries first, then referenced
    # projects, then extra jars — each as an XML string.
    classpathentries = []
    for cpe in sorted(data._classpathentries, key=lambda x: x.path):
        e = self._Element_for_classpathentry(cpe)
        classpathentries.append(ET.tostring(e))

    for name in sorted(data.referenced_projects):
        e = self._Element_for_referenced_project(name)
        classpathentries.append(ET.tostring(e))

    for name in sorted(data.extra_jars):
        e = self._Element_for_extra_jar(mozpath.join(srcdir, name))
        classpathentries.append(ET.tostring(e))

    # Preprocessor substitutions for the project templates.
    defines = {}
    defines['IDE_OBJDIR'] = objdir
    defines['IDE_TOPOBJDIR'] = self.environment.topobjdir
    defines['IDE_SRCDIR'] = srcdir
    defines['IDE_TOPSRCDIR'] = self.environment.topsrcdir
    defines['IDE_PROJECT_NAME'] = data.name
    defines['IDE_PACKAGE_NAME'] = data.package_name
    defines['IDE_PROJECT_DIRECTORY'] = project_directory
    defines['IDE_RELSRCDIR'] = mozpath.relpath(srcdir,
                                               self.environment.topsrcdir)
    defines['IDE_CLASSPATH_ENTRIES'] = '\n'.join(
        '\t' + cpe for cpe in classpathentries)
    defines['IDE_RECURSIVE_MAKE_TARGETS'] = ' '.join(
        sorted(data.recursive_make_targets))
    # Like android.library=true
    defines['IDE_PROJECT_LIBRARY_SETTING'] = \
        'android.library=true' if data.is_library else ''
    # Like android.library.reference.1=FennecBrandingResources
    defines['IDE_PROJECT_LIBRARY_REFERENCES'] = '\n'.join(
        'android.library.reference.%s=%s' % (i + 1, ref)
        for i, ref in enumerate(sorted(data.included_projects)))

    if data.filtered_resources:
        filteredResources = self._Element_for_filtered_resources(
            data.filtered_resources)
        defines['IDE_PROJECT_FILTERED_RESOURCES'] = \
            pretty_print(filteredResources).strip()
    else:
        defines['IDE_PROJECT_FILTERED_RESOURCES'] = ''

    defines['ANDROID_TARGET_SDK'] = \
        self.environment.substs['ANDROID_TARGET_SDK']

    copier = FileCopier()
    finder = FileFinder(template_directory)
    # '.**' also picks up dotfiles (e.g. .project/.classpath templates).
    for input_filename, f in itertools.chain(finder.find('**'),
                                             finder.find('.**')):
        if input_filename == 'AndroidManifest.xml' and not data.is_library:
            # Main projects supply their own manifests.
            continue
        copier.add(input_filename, PreprocessedFile(
            mozpath.join(finder.base, input_filename),
            depfile_path=None,
            marker='#',
            defines=defines,
            extra_depends={mozpath.join(finder.base, input_filename)}))

    # When we re-create the build backend, we kill everything that was there.
    if os.path.isdir(project_directory):
        self.summary.updated_count += 1
    else:
        self.summary.created_count += 1

    copier.copy(project_directory, skip_if_older=False,
                remove_unaccounted=True)
from __future__ import absolute_import, print_function, unicode_literals

import os
import six
import sys
import weakref

from mozbuild.util import (
    exec_,
    ReadOnlyDict,
)

from .context import Context
from mozpack.files import FileFinder

# Module-wide finder rooted at the filesystem root.
default_finder = FileFinder('/')


def alphabetical_sorted(iterable, key=lambda x: x.lower(), reverse=False):
    """sorted() replacement for the sandbox, ordering alphabetically by
    default.
    """
    return sorted(iterable, key=key, reverse=reverse)


class SandboxError(Exception):
    # Base class for sandbox errors; carries the stack of files being
    # evaluated when the error occurred.
    def __init__(self, file_stack):
        self.file_stack = file_stack


class SandboxExecutionError(SandboxError):
def package_fennec_apk(inputs=None, omni_ja=None, classes_dex=None,
                       lib_dirs=None, assets_dirs=None, features_dirs=None,
                       szip_assets_libs_with=None, root_files=None,
                       verbose=False):
    """Build a Jarrer for a Fennec APK from the given inputs.

    Args:
        inputs: existing jar/apk files whose contents seed the archive;
            later files override earlier ones.
        omni_ja: path to omni.ja, stored (uncompressed) under assets/.
        classes_dex: path to classes.dex, added at the archive root.
        lib_dirs: directories whose contents go under lib/.
        assets_dirs: directories whose contents go under assets/; ``.so``
            files may be szipped and are stored uncompressed when szipped.
        features_dirs: directories whose contents go under assets/features/.
        szip_assets_libs_with: optional szip binary used to compress asset
            libraries in place before packing.
        root_files: files added at the archive root under their basename.
        verbose: print each file as it is packaged.

    Returns:
        The populated Jarrer.

    Raises:
        ValueError: when a file to add does not exist on disk.
    """
    # Use None sentinels instead of mutable default arguments; the lists
    # are never mutated here, but shared mutable defaults are a foot-gun.
    inputs = inputs or []
    lib_dirs = lib_dirs or []
    assets_dirs = assets_dirs or []
    features_dirs = features_dirs or []
    root_files = root_files or []

    jarrer = Jarrer(optimize=False)

    # First, take input files. The contents of the later files overwrites the
    # content of earlier files.
    for input in inputs:
        jar = JarReader(input)
        for file in jar:
            path = file.filename
            if jarrer.contains(path):
                jarrer.remove(path)
            jarrer.add(path, DeflatedFile(file), compress=file.compressed)

    def add(path, file, compress=None):
        # Add `file` at `path`, replacing any earlier entry with that path.
        abspath = os.path.abspath(file.path)
        if verbose:
            print('Packaging %s from %s' % (path, file.path))
        if not os.path.exists(abspath):
            raise ValueError('File %s not found (looked for %s)' %
                             (file.path, abspath))
        if jarrer.contains(path):
            jarrer.remove(path)
        jarrer.add(path, file, compress=compress)

    for features_dir in features_dirs:
        finder = FileFinder(features_dir, find_executables=False)
        for p, f in finder.find('**'):
            add(mozpath.join('assets', 'features', p), f, False)

    for assets_dir in assets_dirs:
        finder = FileFinder(assets_dir, find_executables=False)
        for p, f in finder.find('**'):
            compress = None  # Take default from Jarrer.
            if p.endswith('.so'):
                # Asset libraries are special.
                if szip_assets_libs_with:
                    # We need to szip libraries before packing.  The file
                    # returned by the finder is not yet opened.  When it is
                    # opened, it will "see" the content updated by szip.
                    subprocess.check_output(
                        [szip_assets_libs_with,
                         mozpath.join(finder.base, p)])
                if f.open().read(4) == 'SeZz':
                    # We need to store (rather than deflate) szipped
                    # libraries (even if we don't szip them ourselves).
                    compress = False
            add(mozpath.join('assets', p), f, compress=compress)

    for lib_dir in lib_dirs:
        finder = FileFinder(lib_dir, find_executables=False)
        for p, f in finder.find('**'):
            add(mozpath.join('lib', p), f)

    for root_file in root_files:
        add(os.path.basename(root_file), File(root_file))

    if omni_ja:
        add(mozpath.join('assets', 'omni.ja'), File(omni_ja),
            compress=False)

    if classes_dex:
        add('classes.dex', File(classes_dex))

    return jarrer
def main():
    """Entry point for the packager: parse arguments, choose a formatter
    (flat/jar/omni), populate a copier from the manifest or source tree,
    then copy everything into the destination and write 'precomplete'.
    """
    parser = ArgumentParser()
    parser.add_argument('-D', dest='defines', action='append',
                        metavar="VAR[=VAL]", help='Define a variable')
    parser.add_argument('--format', default='omni',
                        help='Choose the chrome format for packaging ' +
                        '(omni, jar or flat ; default: %(default)s)')
    parser.add_argument('--removals', default=None,
                        help='removed-files source file')
    parser.add_argument('--ignore-errors', action='store_true', default=False,
                        help='Transform errors into warnings.')
    parser.add_argument('--minify', action='store_true', default=False,
                        help='Make some files more compact while packaging')
    parser.add_argument('--minify-js', action='store_true',
                        help='Minify JavaScript files while packaging.')
    parser.add_argument('--js-binary',
                        help='Path to js binary. This is used to verify '
                        'minified JavaScript. If this is not defined, '
                        'minification verification will not be performed.')
    parser.add_argument('--jarlog', default='', help='File containing jar ' +
                        'access logs')
    parser.add_argument('--optimizejars', action='store_true', default=False,
                        help='Enable jar optimizations')
    parser.add_argument('--compress', choices=('none', 'deflate', 'brotli'),
                        default='deflate',
                        help='Use given jar compression (default: deflate)')
    parser.add_argument('manifest', default=None, nargs='?',
                        help='Manifest file name')
    parser.add_argument('source', help='Source directory')
    parser.add_argument('destination', help='Destination directory')
    parser.add_argument('--non-resource', nargs='+', metavar='PATTERN',
                        default=[],
                        help='Extra files not to be considered as resources')
    args = parser.parse_args()

    defines = dict(buildconfig.defines)
    if args.ignore_errors:
        errors.ignore_errors()

    if args.defines:
        for name, value in [split_define(d) for d in args.defines]:
            defines[name] = value

    compress = {
        'none': False,
        'deflate': True,
        'brotli': JAR_BROTLI,
    }[args.compress]

    copier = FileCopier()
    if args.format == 'flat':
        formatter = FlatFormatter(copier)
    elif args.format == 'jar':
        formatter = JarFormatter(copier, compress=compress,
                                 optimize=args.optimizejars)
    elif args.format == 'omni':
        formatter = OmniJarFormatter(copier,
                                     buildconfig.substs['OMNIJAR_NAME'],
                                     compress=compress,
                                     optimize=args.optimizejars,
                                     non_resources=args.non_resource)
    else:
        errors.fatal('Unknown format: %s' % args.format)

    # Adjust defines according to the requested format.
    if isinstance(formatter, OmniJarFormatter):
        defines['MOZ_OMNIJAR'] = 1
    elif 'MOZ_OMNIJAR' in defines:
        del defines['MOZ_OMNIJAR']

    respath = ''
    if 'RESPATH' in defines:
        respath = SimpleManifestSink.normalize_path(defines['RESPATH'])
    while respath.startswith('/'):
        respath = respath[1:]

    if not buildconfig.substs['CROSS_COMPILE']:
        launcher.tooldir = mozpath.join(buildconfig.topobjdir, 'dist')

    with errors.accumulate():
        finder_args = dict(
            minify=args.minify,
            minify_js=args.minify_js,
        )
        if args.js_binary:
            finder_args['minify_js_verify_command'] = [
                args.js_binary,
                os.path.join(os.path.abspath(os.path.dirname(__file__)),
                             'js-compare-ast.js')
            ]
        finder = FileFinder(args.source, find_executables=True,
                            **finder_args)
        if 'NO_PKG_FILES' in os.environ:
            sinkformatter = NoPkgFilesRemover(formatter,
                                              args.manifest is not None)
        else:
            sinkformatter = formatter
        sink = SimpleManifestSink(finder, sinkformatter)
        if args.manifest:
            preprocess_manifest(sink, args.manifest, defines)
        else:
            sink.add(Component(''), 'bin/*')
        sink.close(args.manifest is not None)

        if args.removals:
            removals_in = StringIO(open(args.removals).read())
            removals_in.name = args.removals
            removals = RemovedFiles(copier)
            preprocess(removals_in, removals, defines)
            copier.add(mozpath.join(respath, 'removed-files'), removals)

    # shlibsign libraries
    if launcher.can_launch():
        if not mozinfo.isMac and \
                buildconfig.substs.get('COMPILE_ENVIRONMENT'):
            for lib in SIGN_LIBS:
                libbase = mozpath.join(respath, '%s%s') \
                    % (buildconfig.substs['DLL_PREFIX'], lib)
                libname = '%s%s' % (libbase,
                                    buildconfig.substs['DLL_SUFFIX'])
                if copier.contains(libname):
                    copier.add(
                        libbase + '.chk',
                        LibSignFile(os.path.join(args.destination,
                                                 libname)))

    # Setup preloading
    if args.jarlog and os.path.exists(args.jarlog):
        from mozpack.mozjar import JarLog
        log = JarLog(args.jarlog)
        for p, f in copier:
            if not isinstance(f, Jarrer):
                continue
            key = JarLog.canonicalize(os.path.join(args.destination, p))
            if key in log:
                f.preload(log[key])

    copier.copy(args.destination)
    generate_precomplete(
        os.path.normpath(os.path.join(args.destination, respath)))
def _processEntryLine(self, e, outHelper, jf):
    """Process a single jar-manifest entry ``e``.

    Resolves the entry's source against the appropriate base directories
    (l10n dirs, topsourcedir for '/'-prefixed paths, or srcdirs + cwd),
    expands wildcard sources by recursing once per match, then either
    preprocesses the file or copies/symlinks it if newer.

    Raises RuntimeError when the source cannot be found or the output was
    already added.
    """
    out = e.output
    src = e.source

    # pick the right sourcedir -- l10n, topsrc or src
    if e.is_locale:
        # If the file is a Fluent l10n resource, we want to skip the
        # 'en-US' fallbacking.
        #
        # To achieve that, we're testing if we have more than one
        # localedir, and if the last of those has 'en-US' in it.
        # If that's the case, we're removing the last one.
        if (e.source.endswith('.ftl') and
                len(self.localedirs) > 1 and
                'en-US' in self.localedirs[-1]):
            src_base = self.localedirs[:-1]
        else:
            src_base = self.localedirs
    elif src.startswith('/'):
        # path/in/jar/file_name.xul     (/path/in/sourcetree/file_name.xul)
        # refers to a path relative to topsourcedir, use that as base
        # and strip the leading '/'
        src_base = [self.topsourcedir]
        src = src[1:]
    else:
        # use srcdirs and the objdir (current working dir) for relative
        # paths
        src_base = self.sourcedirs + [os.getcwd()]

    if '*' in src:
        def _prefix(s):
            # Yield the leading path components that contain no wildcard.
            for p in s.split('/'):
                if '*' not in p:
                    yield p + '/'
        prefix = ''.join(_prefix(src))
        emitted = set()
        for _srcdir in src_base:
            finder = FileFinder(_srcdir)
            for path, _ in finder.find(src):
                # If the path was already seen in one of the other source
                # directories, skip it. That matches the non-wildcard case
                # below, where we pick the first existing file.
                reduced_path = path[len(prefix):]
                if reduced_path in emitted:
                    continue
                emitted.add(reduced_path)
                e = JarManifestEntry(
                    mozpath.join(out, reduced_path),
                    path,
                    is_locale=e.is_locale,
                    preprocess=e.preprocess,
                )
                self._processEntryLine(e, outHelper, jf)
        return

    # check if the source file exists
    realsrc = None
    for _srcdir in src_base:
        if os.path.isfile(os.path.join(_srcdir, src)):
            realsrc = os.path.join(_srcdir, src)
            break
    if realsrc is None:
        if jf is not None:
            jf.close()
        raise RuntimeError('File "{0}" not found in {1}'.format(
            src, ', '.join(src_base)))

    if out in self._seen_output:
        raise RuntimeError('%s already added' % out)
    self._seen_output.add(out)

    if e.preprocess:
        outf = outHelper.getOutput(out, mode='w')
        inf = io.open(realsrc, encoding='utf-8')
        pp = self.pp.clone()
        if src[-4:] == '.css':
            # CSS can't use '#' line markers; use '%' instead.
            pp.setMarker('%')
        pp.out = outf
        pp.do_include(inf)
        pp.failUnused(realsrc)
        outf.close()
        inf.close()
        return

    # copy or symlink if newer
    if getModTime(realsrc) > outHelper.getDestModTime(e.output):
        if self.outputFormat == 'symlink':
            outHelper.symlink(realsrc, out)
            return
        outf = outHelper.getOutput(out)
        # open in binary mode, this can be images etc
        inf = open(realsrc, 'rb')
        outf.write(inf.read())
        outf.close()
        inf.close()
def finder(self):
    """Return the FileFinder for this path, creating it on first use.

    The finder is cached on ``self._finder``; subsequent calls return
    the same instance.
    """
    if not self._finder:
        self._finder = FileFinder(self.path, ignore=self.exclude)
    return self._finder
def _write_projects_for_sources(self, sources, prefix, out_dir):
    """Write one Visual Studio project per (item, path) in ``sources``.

    Returns a dict mapping project basename -> (project_id, basename,
    item).  ``prefix`` distinguishes project kinds; "binary" projects get
    a debugger command configured.
    """
    projects = {}
    for item, path in sorted(sources.items()):
        config = self._paths_to_configs.get(path, None)
        # NOTE(review): this rebinding shadows the `sources` parameter;
        # harmless because the outer loop iterates a snapshot from
        # sorted(...), but worth renaming eventually.
        sources = self._paths_to_sources.get(path, set())
        sources = set(
            os.path.join("$(TopSrcDir)", path, s) for s in sources)
        sources = set(os.path.normpath(s) for s in sources)

        finder = FileFinder(
            os.path.join(self.environment.topsrcdir, path))

        headers = [t[0] for t in finder.find("*.h")]
        headers = [
            os.path.normpath(os.path.join("$(TopSrcDir)", path, f))
            for f in headers
        ]

        includes = [
            os.path.join("$(TopSrcDir)", path),
            os.path.join("$(TopObjDir)", path),
        ]
        includes.extend(self._paths_to_includes.get(path, []))
        includes.append("$(TopObjDir)\\dist\\include\\nss")
        includes.append("$(TopObjDir)\\dist\\include")

        # Harvest -I flags from the per-path config substitutions.
        for v in (
            "NSPR_CFLAGS",
            "NSS_CFLAGS",
            "MOZ_JPEG_CFLAGS",
            "MOZ_PNG_CFLAGS",
            "MOZ_ZLIB_CFLAGS",
            "MOZ_PIXMAN_CFLAGS",
        ):
            if not config:
                break
            args = config.substs.get(v, [])
            for i, arg in enumerate(args):
                if arg.startswith("-I"):
                    includes.append(os.path.normpath(arg[2:]))

        # Pull in system defaults.
        includes.append("$(DefaultIncludes)")

        includes = [os.path.normpath(i) for i in includes]

        defines = []
        for k, v in self._paths_to_defines.get(path, {}).items():
            if v is True:
                defines.append(k)
            else:
                defines.append("%s=%s" % (k, v))

        debugger = None
        if prefix == "binary":
            if item.startswith(self.environment.substs["MOZ_APP_NAME"]):
                app_args = \
                    "-no-remote -profile $(TopObjDir)\\tmp\\profile-default"
                if self.environment.substs.get("MOZ_LAUNCHER_PROCESS",
                                               False):
                    app_args += " -wait-for-browser"
                debugger = ("$(TopObjDir)\\dist\\bin\\%s" % item, app_args)
            else:
                debugger = ("$(TopObjDir)\\dist\\bin\\%s" % item, "")

        basename = "%s_%s" % (prefix, item)

        project_id = self._write_vs_project(
            out_dir,
            basename,
            item,
            includes=includes,
            forced_includes=[
                "$(TopObjDir)\\dist\\include\\mozilla-config.h"
            ],
            defines=defines,
            headers=headers,
            sources=sources,
            debugger=debugger,
        )

        projects[basename] = (project_id, basename, item)

    return projects
def android_test(self, args):
    """Run the hard-coded Gradle unit-test task and report its results.

    Parses the JUnit XML reports, prints TEST-*/SUITE-* lines for the
    log parser, and links the HTML report (Taskcluster artifact URL when
    TASK_ID/RUN_ID are set, local objdir path otherwise).  Returns the
    accumulated exit code.
    """
    gradle_targets = [
        'app:testOfficialPhotonDebugUnitTest',
    ]
    ret = self.gradle(gradle_targets + ["--continue"] + args,
                      verbose=True)

    # Findbug produces both HTML and XML reports.  Visit the
    # XML report(s) to report errors and link to the HTML
    # report(s) for human consumption.
    import itertools
    import xml.etree.ElementTree as ET

    from mozpack.files import (
        FileFinder,
    )

    if 'TASK_ID' in os.environ and 'RUN_ID' in os.environ:
        root_url = "https://queue.taskcluster.net/v1/task/{}/runs/{}/artifacts/public/android/unittest".format(
            os.environ['TASK_ID'], os.environ['RUN_ID'])
    else:
        root_url = os.path.join(
            self.topobjdir,
            'gradle/build/mobile/android/app/reports/tests')

    reports = ('officialPhotonDebug', )
    for report in reports:
        finder = FileFinder(
            os.path.join(self.topobjdir,
                         'gradle/build/mobile/android/app/test-results/',
                         report))
        for p, _ in finder.find('TEST-*.xml'):
            f = open(os.path.join(finder.base, p), 'rt')
            tree = ET.parse(f)
            root = tree.getroot()

            print('SUITE-START | android-test | {} {}'.format(
                report, root.get('name')))

            for testcase in root.findall('testcase'):
                name = testcase.get('name')
                print('TEST-START | {}'.format(name))

                # Schema cribbed from
                # http://llg.cubic.org/docs/junit/.  There's no
                # particular advantage to formatting the error, so
                # for now let's just output the unexpected XML
                # tag.
                error_count = 0
                for unexpected in itertools.chain(
                        testcase.findall('error'),
                        testcase.findall('failure')):
                    for line in ET.tostring(
                            unexpected).strip().splitlines():
                        print('TEST-UNEXPECTED-FAIL | {} | {}'.format(
                            name, line))
                    error_count += 1
                    ret |= 1

                # Skipped tests aren't unexpected at this time; we
                # disable some tests that require live remote
                # endpoints.
                for skipped in testcase.findall('skipped'):
                    for line in ET.tostring(skipped).strip().splitlines():
                        print('TEST-INFO | {} | {}'.format(name, line))

                if not error_count:
                    print('TEST-PASS | {}'.format(name))

            print('SUITE-END | android-test | {} {}'.format(
                report, root.get('name')))

            title = report
            print(
                "TinderboxPrint: report<br/><a href='{}/{}/index.html'>HTML {} report</a>, visit \"Inspect Task\" link for details"
                .format(root_url, report, title))

    return ret
def process_package_artifact(self, filename, processed_filename):
    """Unpack a macOS DMG package artifact and repack the interesting
    files into a processed zip archive.

    Files listed in ``paths_no_keep_path`` land in bin/<basename>; files
    in ``paths_keep_path`` keep their relative path under bin/.  The
    temporary unpack directory is always removed (best effort).
    """
    tempdir = tempfile.mkdtemp()
    try:
        self.log(logging.INFO, 'artifact', {'tempdir': tempdir},
                 'Unpacking DMG into {tempdir}')
        mozinstall.install(filename, tempdir)
        # Doesn't handle already mounted DMG files nicely:
        #
        # InstallError: Failed to install "/Users/nalexander/.mozbuild/package-frontend/b38eeeb54cdcf744-firefox-44.0a1.en-US.mac.dmg (local variable 'appDir' referenced before assignment)"
        #   File "/Users/nalexander/Mozilla/gecko/mobile/android/mach_commands.py", line 250, in artifact_install
        #     return artifacts.install_from(source, self.distdir)
        #   File "/Users/nalexander/Mozilla/gecko/python/mozbuild/mozbuild/artifacts.py", line 457, in install_from
        #     return self.install_from_hg(source, distdir)
        #   File "/Users/nalexander/Mozilla/gecko/python/mozbuild/mozbuild/artifacts.py", line 445, in install_from_hg
        #     return self.install_from_url(url, distdir)
        #   File "/Users/nalexander/Mozilla/gecko/python/mozbuild/mozbuild/artifacts.py", line 418, in install_from_url
        #     return self.install_from_file(filename, distdir)
        #   File "/Users/nalexander/Mozilla/gecko/python/mozbuild/mozbuild/artifacts.py", line 336, in install_from_file
        #     mozinstall.install(filename, tempdir)
        #   File "/Users/nalexander/Mozilla/gecko/objdir-dce/_virtualenv/lib/python2.7/site-packages/mozinstall/mozinstall.py", line 117, in install
        #     install_dir = _install_dmg(src, dest)
        #   File "/Users/nalexander/Mozilla/gecko/objdir-dce/_virtualenv/lib/python2.7/site-packages/mozinstall/mozinstall.py", line 261, in _install_dmg
        #     subprocess.call('hdiutil detach %s -quiet' % appDir,

        bundle_dirs = glob.glob(mozpath.join(tempdir, '*.app'))
        if len(bundle_dirs) != 1:
            raise ValueError(
                'Expected one source bundle, found: {}'.format(
                    bundle_dirs))
        [source] = bundle_dirs

        # These get copied into dist/bin without the path, so "root/a/b/c" -> "dist/bin/c".
        paths_no_keep_path = ('Contents/MacOS', [
            'crashreporter.app/Contents/MacOS/crashreporter',
            'firefox',
            'firefox-bin',
            'libfreebl3.dylib',
            'liblgpllibs.dylib',
            # 'liblogalloc.dylib',
            'libmozglue.dylib',
            'libnss3.dylib',
            'libnssckbi.dylib',
            'libnssdbm3.dylib',
            'libplugin_child_interpose.dylib',
            # 'libreplace_jemalloc.dylib',
            # 'libreplace_malloc.dylib',
            'libsoftokn3.dylib',
            'plugin-container.app/Contents/MacOS/plugin-container',
            'updater.app/Contents/MacOS/updater',
            # 'xpcshell',
            'XUL',
        ])

        # These get copied into dist/bin with the path, so "root/a/b/c" -> "dist/bin/a/b/c".
        paths_keep_path = ('Contents/Resources', [
            'browser/components/libbrowsercomps.dylib',
            'dependentlibs.list',
            # 'firefox',
            'gmp-clearkey/0.1/libclearkey.dylib',
            # 'gmp-fake/1.0/libfake.dylib',
            # 'gmp-fakeopenh264/1.0/libfakeopenh264.dylib',
            'webapprt-stub',
        ])

        with JarWriter(file=processed_filename, optimize=False,
                       compress_level=5) as writer:
            root, paths = paths_no_keep_path
            finder = FileFinder(mozpath.join(source, root))
            for path in paths:
                for p, f in finder.find(path):
                    self.log(logging.INFO, 'artifact', {'path': path},
                             'Adding {path} to processed archive')
                    destpath = mozpath.join('bin', os.path.basename(p))
                    writer.add(destpath.encode('utf-8'), f,
                               mode=os.stat(
                                   mozpath.join(finder.base, p)).st_mode)

            root, paths = paths_keep_path
            finder = FileFinder(mozpath.join(source, root))
            for path in paths:
                for p, f in finder.find(path):
                    self.log(logging.INFO, 'artifact', {'path': path},
                             'Adding {path} to processed archive')
                    destpath = mozpath.join('bin', p)
                    writer.add(destpath.encode('utf-8'), f,
                               mode=os.stat(
                                   mozpath.join(finder.base, p)).st_mode)

    finally:
        try:
            shutil.rmtree(tempdir)
        except (OSError, IOError):
            self.log(logging.WARN, 'artifact', {'tempdir': tempdir},
                     'Unable to delete {tempdir}')
            pass
def main(args):
    """Build the suggested-sites JSON file from region.properties inputs.

    Reads properties from the given srcdirs (first has highest priority),
    assembles one record per site name with an android.resource:// image
    URL, optionally verifies a matching drawable exists, and writes the
    JSON via FileAvoidWrite.  Returns 0.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--verbose', '-v', default=False,
                        action='store_true',
                        help='be verbose')
    parser.add_argument('--silent', '-s', default=False,
                        action='store_true',
                        help='be silent')
    parser.add_argument('--android-package-name', metavar='NAME',
                        required=True,
                        help='Android package name')
    parser.add_argument(
        '--resources', metavar='RESOURCES', default=None,
        help='optional Android resource directory to find drawables in')
    parser.add_argument(
        '--srcdir', metavar='SRCDIR', action='append', required=True,
        help='directories to read inputs from, in order of priority')
    parser.add_argument('output', metavar='OUTPUT', help='output')
    opts = parser.parse_args(args)

    # Use reversed order so that the first srcdir has higher priority to
    # override keys.
    all_properties = merge_properties('region.properties',
                                      reversed(opts.srcdir))
    names = get_site_list_from_properties(all_properties)
    if opts.verbose:
        print('Reading {len} suggested sites: {names}'.format(
            len=len(names), names=names))

    # Keep these two in sync.
    image_url_template = \
        'android.resource://%s/drawable/suggestedsites_{name}' % \
        opts.android_package_name
    drawables_template = 'drawable*/suggestedsites_{name}.*'

    # Load properties corresponding to each site name and define their
    # respective image URL.
    sites = []
    for name in names:
        site = get_site_from_properties(name, all_properties)
        site['imageurl'] = image_url_template.format(name=name)
        sites.append(site)

        # Now check for existence of an appropriately named drawable.  If
        # none exists, throw.  This stops a locale discovering, at
        # runtime, that the corresponding drawable was not added to en-US.
        if not opts.resources:
            continue
        resources = os.path.abspath(opts.resources)
        finder = FileFinder(resources)
        matches = [
            p for p, _ in finder.find(drawables_template.format(name=name))
        ]
        if not matches:
            raise Exception(
                "Could not find drawable in '{resources}' for '{name}'"
                .format(resources=resources, name=name))
        else:
            if opts.verbose:
                print(
                    "Found {len} drawables in '{resources}' for '{name}': {matches}"
                    .format(len=len(matches), resources=resources,
                            name=name, matches=matches))

    # FileAvoidWrite creates its parent directories.
    output = os.path.abspath(opts.output)
    fh = FileAvoidWrite(output)
    json.dump(sites, fh)
    existed, updated = fh.close()

    if not opts.silent:
        if updated:
            print('{output} updated'.format(output=output))
        else:
            print('{output} already up-to-date'.format(output=output))

    return 0
def _process_final_target_files(self, obj):
    """Emit symlink rules installing FINAL_TARGET_FILES-style objects.

    For non-objdir files, validates the install target is one of the known
    destination directories, then walks ``obj.files`` and registers a
    symlink rule per file on the appropriate backend file.  Wildcard
    source paths are expanded with FileFinder; objdir-generated files are
    symlinked from the directory that generates them.
    """
    target = obj.install_target
    if not isinstance(obj, ObjdirFiles):
        # Only these destinations are supported; anything else is a
        # moz.build error.
        path = mozpath.basedir(target, (
            'dist/bin',
            'dist/xpi-stage',
            '_tests',
            'dist/include',
            'dist/branding',
            'dist/sdk',
        ))
        if not path:
            raise Exception("Cannot install to " + target)

    if target.startswith('_tests'):
        # TODO: TEST_HARNESS_FILES present a few challenges for the tup
        # backend (bug 1372381).
        return

    for path, files in obj.files.walk():
        backend_file = self._get_backend_file(mozpath.join(target, path))
        for f in files:
            if not isinstance(f, ObjDirPath):
                # Source-tree (or absolute) path.
                if '*' in f:
                    if f.startswith('/') or isinstance(f, AbsolutePath):
                        basepath, wild = os.path.split(f.full_path)
                        if '*' in basepath:
                            raise Exception(
                                "Wildcards are only supported in the filename part of "
                                "srcdir-relative or absolute paths.")
                        # TODO: This is only needed for Windows, so we can
                        # skip this for now.
                        pass
                    else:
                        # Take the leading non-wildcard components as the
                        # directory to search from.
                        def _prefix(s):
                            for p in mozpath.split(s):
                                if '*' not in p:
                                    yield p + '/'
                        prefix = ''.join(_prefix(f.full_path))
                        self.backend_input_files.add(prefix)
                        finder = FileFinder(prefix)
                        for p, _ in finder.find(f.full_path[len(prefix):]):
                            # Each match is installed under the file's
                            # target basename, preserving its relative path.
                            backend_file.symlink_rule(
                                mozpath.join(prefix, p),
                                output=mozpath.join(f.target_basename, p),
                                output_group=self._installed_files)
                else:
                    backend_file.symlink_rule(
                        f.full_path, output=f.target_basename,
                        output_group=self._installed_files)
            else:
                if (self.environment.is_artifact_build and
                    any(mozpath.match(f.target_basename, p)
                        for p in self._compile_env_gen_files)):
                    # If we have an artifact build we never would have
                    # generated this file, so do not attempt to install it.
                    continue

                # We're not generating files in these directories yet, so
                # don't attempt to install files generated from them.
                if f.context.relobjdir not in ('layout/style/test',
                                               'toolkit/library'):
                    output = mozpath.join('$(MOZ_OBJ_ROOT)', target, path,
                                          f.target_basename)
                    # The rule lives in the backend file of the directory
                    # that generates the source, not the install target's.
                    gen_backend_file = self._get_backend_file(
                        f.context.relobjdir)
                    gen_backend_file.symlink_rule(
                        f.full_path, output=output,
                        output_group=self._installed_files)
def find_files(archive): extra_entries = [] generated_harness_files = find_generated_harness_files() if archive == "common": # Construct entries ensuring all our generated harness files are # packaged in the common tests archive. packaged_paths = set() for entry in OBJDIR_TEST_FILES.values(): pat = mozpath.join(entry["base"], entry["pattern"]) del entry["pattern"] patterns = [] for path in generated_harness_files: if mozpath.match(path, pat): patterns.append(path[len(entry["base"]) + 1:]) packaged_paths.add(path) if patterns: entry["patterns"] = patterns extra_entries.append(entry) entry = { "source": buildconfig.topobjdir, "base": "_tests", "patterns": [] } for path in set(generated_harness_files) - packaged_paths: entry["patterns"].append(path[len("_tests") + 1:]) extra_entries.append(entry) for entry in ARCHIVE_FILES[archive] + extra_entries: source = entry["source"] dest = entry.get("dest") base = entry.get("base", "") pattern = entry.get("pattern") patterns = entry.get("patterns", []) if pattern: patterns.append(pattern) manifest = entry.get("manifest") manifests = entry.get("manifests", []) if manifest: manifests.append(manifest) if manifests: dirs = find_manifest_dirs(os.path.join(source, base), manifests) patterns.extend({"{}/**".format(d) for d in dirs}) ignore = list(entry.get("ignore", [])) ignore.extend(["**/.flake8", "**/.mkdir.done", "**/*.pyc"]) if archive not in ("common", "updater-dep") and base.startswith("_tests"): # We may have generated_harness_files to exclude from this entry. for path in generated_harness_files: if path.startswith(base): ignore.append(path[len(base) + 1:]) common_kwargs = {"find_dotfiles": True, "ignore": ignore} finder = FileFinder(os.path.join(source, base), **common_kwargs) for pattern in patterns: for p, f in finder.find(pattern): if dest: p = mozpath.join(dest, p) yield p, f
def _processEntryLine(self, m, outHelper, jf): out = m.group('output') src = m.group('source') or os.path.basename(out) # pick the right sourcedir -- l10n, topsrc or src if m.group('locale'): src_base = self.localedirs elif src.startswith('/'): # path/in/jar/file_name.xul (/path/in/sourcetree/file_name.xul) # refers to a path relative to topsourcedir, use that as base # and strip the leading '/' src_base = [self.topsourcedir] src = src[1:] else: # use srcdirs and the objdir (current working dir) for relative paths src_base = self.sourcedirs + [os.getcwd()] if '*' in src: if not out.endswith('/'): out += '/' def _prefix(s): for p in s.split('/'): if '*' not in p: yield p + '/' prefix = ''.join(_prefix(src)) fmt = '%s%s %s%%s (%s%%s)' % ( m.group('optPreprocess') or '', m.group('optOverwrite') or '', out, m.group('locale') or '', ) for _srcdir in src_base: finder = FileFinder(_srcdir, find_executables=False) for path, _ in finder.find(src): line = fmt % (path[len(prefix):], path) m = self.entryline.match(line) if m: self._processEntryLine(m, outHelper, jf) return # check if the source file exists realsrc = None for _srcdir in src_base: if os.path.isfile(os.path.join(_srcdir, src)): realsrc = os.path.join(_srcdir, src) break if realsrc is None: if jf is not None: jf.close() raise RuntimeError('File "{0}" not found in {1}'.format(src, ', '.join(src_base))) if m.group('optPreprocess'): outf = outHelper.getOutput(out) inf = open(realsrc) pp = self.pp.clone() if src[-4:] == '.css': pp.setMarker('%') pp.out = outf pp.do_include(inf) pp.warnUnused(realsrc) outf.close() inf.close() return # copy or symlink if newer or overwrite if m.group('optOverwrite') or getModTime(realsrc) \ > outHelper.getDestModTime(m.group('output')): if self.outputFormat == 'symlink': outHelper.symlink(realsrc, out) return outf = outHelper.getOutput(out) # open in binary mode, this can be images etc inf = open(realsrc, 'rb') outf.write(inf.read()) outf.close() inf.close()
def _process_final_target_files(self, obj):
    """Emit symlink rules installing FINAL_TARGET_FILES-style objects.

    Validates the install target for non-objdir files, registers chrome
    feature data per path, then walks ``obj.files`` adding a symlink rule
    per file.  Files matching ``self._compile_env_files`` go into the
    installed-files output group; wildcard source paths are expanded with
    FileFinder; objdir-generated files are symlinked from their generating
    directory, possibly delayed until that directory's rules exist.
    """
    target = obj.install_target
    if not isinstance(obj, ObjdirFiles):
        # Only these destinations are supported; anything else is a
        # moz.build error.
        path = mozpath.basedir(target, (
            'dist/bin',
            'dist/xpi-stage',
            '_tests',
            'dist/include',
            'dist/sdk',
        ))
        if not path:
            raise Exception("Cannot install to " + target)

    for path, files in obj.files.walk():
        self._add_features(target, path)
        for f in files:
            # Files needed by the compile environment are tracked via an
            # output group so compilation can depend on them.
            output_group = None
            if any(mozpath.match(mozpath.basename(f), p)
                   for p in self._compile_env_files):
                output_group = self._installed_files

            if not isinstance(f, ObjDirPath):
                # Source-tree (or absolute) path.
                backend_file = self._get_backend_file(
                    mozpath.join(target, path))
                if '*' in f:
                    if f.startswith('/') or isinstance(f, AbsolutePath):
                        basepath, wild = os.path.split(f.full_path)
                        if '*' in basepath:
                            raise Exception(
                                "Wildcards are only supported in the filename part of "
                                "srcdir-relative or absolute paths.")
                        # TODO: This is only needed for Windows, so we can
                        # skip this for now.
                        pass
                    else:
                        # Take the leading non-wildcard components as the
                        # directory to search from.
                        def _prefix(s):
                            for p in mozpath.split(s):
                                if '*' not in p:
                                    yield p + '/'
                        prefix = ''.join(_prefix(f.full_path))
                        self.backend_input_files.add(prefix)

                        output_dir = ''
                        # If we have a RenamedSourcePath here, the common backend
                        # has generated this object from a jar manifest, and we
                        # can rely on 'path' to be our destination path relative
                        # to any wildcard match. Otherwise, the output file may
                        # contribute to our destination directory.
                        if not isinstance(f, RenamedSourcePath):
                            output_dir = ''.join(
                                _prefix(mozpath.dirname(f)))

                        finder = FileFinder(prefix)
                        for p, _ in finder.find(f.full_path[len(prefix):]):
                            install_dir = prefix[len(obj.srcdir) + 1:]
                            output = p
                            # A non-wildcard target basename prefixes each
                            # match's output path.
                            if f.target_basename and '*' not in f.target_basename:
                                output = mozpath.join(
                                    f.target_basename, output)
                            backend_file.symlink_rule(
                                mozpath.join(prefix, p),
                                output=mozpath.join(output_dir, output),
                                output_group=output_group)
                else:
                    backend_file.symlink_rule(
                        f.full_path, output=f.target_basename,
                        output_group=output_group)
            else:
                if (self.environment.is_artifact_build and
                    any(mozpath.match(f.target_basename, p)
                        for p in self._compile_env_gen_files)):
                    # If we have an artifact build we never would have
                    # generated this file, so do not attempt to install it.
                    continue

                # We're not generating files in these directories yet, so
                # don't attempt to install files generated from them.
                if f.context.relobjdir not in ('toolkit/library',
                                               'js/src/shell'):
                    output = mozpath.join('$(MOZ_OBJ_ROOT)', target, path,
                                          f.target_basename)
                    # The rule lives in the backend file of the directory
                    # that generates the source, not the install target's.
                    gen_backend_file = self._get_backend_file(
                        f.context.relobjdir)
                    if gen_backend_file.requires_delay([f]):
                        # The generating rules aren't available yet; queue
                        # the install for later emission.
                        gen_backend_file.delayed_installed_files.append(
                            (f.full_path, output, output_group))
                    else:
                        gen_backend_file.symlink_rule(
                            f.full_path, output=output,
                            output_group=output_group)
def test_file_finder(self): self.prepare_match_test(with_dotfiles=True) self.finder = FileFinder(self.tmpdir) self.do_match_test() self.do_finder_test(self.finder)
def _parse_android_test_results(self, artifactdir, gradledir, variants):
    """Parse Gradle JUnit XML reports and emit Tree Herder-style logs.

    For each build variant, reads the TEST-*.xml files produced under the
    objdir, prints SUITE/TEST log lines (including TEST-UNEXPECTED-FAIL
    for errors/failures), and links the HTML report for humans.

    Returns 0 on success, non-zero if any test failed or no reports were
    found.
    """
    # Unit tests produce both HTML and XML reports.  Visit the
    # XML report(s) to report errors and link to the HTML
    # report(s) for human consumption.
    import itertools
    import xml.etree.ElementTree as ET
    from mozpack.files import (
        FileFinder,
    )

    ret = 0
    found_reports = False

    root_url = self._root_url(
        artifactdir=artifactdir,
        objdir=gradledir + '/reports/tests')

    def capitalize(s):
        # Can't use str.capitalize because it lower cases trailing letters.
        return (s[0].upper() + s[1:]) if s else ''

    for variant in variants:
        # Gradle names the per-variant report directory e.g.
        # "testDebugUnitTest".
        report = 'test{}UnitTest'.format(capitalize(variant))
        finder = FileFinder(
            os.path.join(self.topobjdir, gradledir + '/test-results/',
                         report))
        for p, _ in finder.find('TEST-*.xml'):
            found_reports = True
            f = open(os.path.join(finder.base, p), 'rt')
            tree = ET.parse(f)
            root = tree.getroot()

            # Log reports for Tree Herder "Job Details".
            print(
                'TinderboxPrint: report<br/><a href="{}/{}/index.html">HTML {} report</a>, visit "Inspect Task" link for details'
                .format(root_url, report, report))

            # And make the report display as soon as possible.
            failed = root.findall('testcase/error') or root.findall(
                'testcase/failure')
            if failed:
                print(
                    'TEST-UNEXPECTED-FAIL | android-test | There were failing tests. See the reports at: {}/{}/index.html'
                    .format(root_url, report))

            print('SUITE-START | android-test | {} {}'.format(
                report, root.get('name')))

            for testcase in root.findall('testcase'):
                name = testcase.get('name')
                print('TEST-START | {}'.format(name))

                # Schema cribbed from
                # http://llg.cubic.org/docs/junit/.  There's no
                # particular advantage to formatting the error, so
                # for now let's just output the unexpected XML
                # tag.
                error_count = 0
                for unexpected in itertools.chain(
                        testcase.findall('error'),
                        testcase.findall('failure')):
                    for line in ET.tostring(
                            unexpected).strip().splitlines():
                        print('TEST-UNEXPECTED-FAIL | {} | {}'.format(
                            name, line))
                    error_count += 1
                    ret |= 1

                # Skipped tests aren't unexpected at this time; we
                # disable some tests that require live remote
                # endpoints.
                for skipped in testcase.findall('skipped'):
                    for line in ET.tostring(skipped).strip().splitlines():
                        print('TEST-INFO | {} | {}'.format(name, line))

                if not error_count:
                    print('TEST-PASS | {}'.format(name))

            print('SUITE-END | android-test | {} {}'.format(
                report, root.get('name')))

    if not found_reports:
        print(
            'TEST-UNEXPECTED-FAIL | android-test | No reports found under {}'
            .format(gradledir))
        return 1

    return ret