def __init__(self, topsrcdir='/path/to/topsrcdir', extra_substs={},
             error_is_fatal=True):
    self.topsrcdir = mozpath.abspath(topsrcdir)
    self.topobjdir = mozpath.abspath('/path/to/topobjdir')

    self.substs = ReadOnlyDict({
        'MOZ_FOO': 'foo',
        'MOZ_BAR': 'bar',
        'MOZ_TRUE': '1',
        'MOZ_FALSE': '',
        'DLL_PREFIX': 'lib',
        'DLL_SUFFIX': '.so',
    }, **extra_substs)

    self.substs_unicode = ReadOnlyDict({
        k.decode('utf-8'): v.decode('utf-8', 'replace')
        for k, v in self.substs.items()
    })

    self.defines = self.substs

    self.external_source_dir = None
    self.lib_prefix = 'lib'
    self.lib_suffix = '.a'
    self.import_prefix = 'lib'
    self.import_suffix = '.so'
    self.dll_prefix = 'lib'
    self.dll_suffix = '.so'
    self.error_is_fatal = error_is_fatal
def __init__(self, paths, config, environ, *args, **kwargs):
    self._search_path = environ.get('PATH', '').split(os.pathsep)

    self._subprocess_paths = {
        mozpath.abspath(k): v for k, v in paths.iteritems() if v
    }

    paths = paths.keys()

    environ = dict(environ)
    if 'CONFIG_SHELL' not in environ:
        environ['CONFIG_SHELL'] = mozpath.abspath('/bin/sh')
        self._subprocess_paths[environ['CONFIG_SHELL']] = self.shell
        paths.append(environ['CONFIG_SHELL'])
    self._environ = copy.copy(environ)

    vfs = ConfigureTestVFS(paths)

    self.OS = ReadOnlyNamespace(path=ReadOnlyNamespace(**{
        k: v if k not in ('exists', 'isfile') else getattr(vfs, k)
        for k, v in ConfigureSandbox.OS.path.__dict__.iteritems()
    }))

    super(ConfigureTestSandbox, self).__init__(config, environ, *args,
                                               **kwargs)
def __init__(self, paths, config, environ, *args, **kwargs):
    self._search_path = environ.get('PATH', '').split(os.pathsep)

    self._subprocess_paths = {
        mozpath.abspath(k): v for k, v in paths.iteritems() if v
    }

    paths = paths.keys()

    environ = dict(environ)
    if 'CONFIG_SHELL' not in environ:
        environ['CONFIG_SHELL'] = mozpath.abspath('/bin/sh')
        self._subprocess_paths[environ['CONFIG_SHELL']] = self.shell
        paths.append(environ['CONFIG_SHELL'])
    self._environ = copy.copy(environ)

    vfs = ConfigureTestVFS(paths)

    os_path = {
        k: getattr(vfs, k) for k in dir(vfs) if not k.startswith('_')
    }
    os_path.update(self.OS.path.__dict__)

    self.imported_os = ReadOnlyNamespace(path=ReadOnlyNamespace(**os_path))

    super(ConfigureTestSandbox, self).__init__(config, environ, *args,
                                               **kwargs)
def test_mismatched_compiler(self):
    self.do_toolchain_test(self.PATHS, {
        'c_compiler': self.GCC_4_9_RESULT,
        'cxx_compiler': (
            'The target C compiler is gcc, while the target C++ compiler '
            'is clang. Need to use the same compiler suite.'),
    }, environ={
        'CXX': 'clang++',
    })

    self.do_toolchain_test(self.PATHS, {
        'c_compiler': self.GCC_4_9_RESULT,
        'cxx_compiler': self.GXX_4_9_RESULT,
        'host_c_compiler': self.GCC_4_9_RESULT,
        'host_cxx_compiler': (
            'The host C compiler is gcc, while the host C++ compiler '
            'is clang. Need to use the same compiler suite.'),
    }, environ={
        'HOST_CXX': 'clang++',
    })

    self.do_toolchain_test(self.PATHS, {
        'c_compiler': '`%s` is not a C compiler.'
                      % mozpath.abspath('/usr/bin/g++'),
    }, environ={
        'CC': 'g++',
    })

    self.do_toolchain_test(self.PATHS, {
        'c_compiler': self.GCC_4_9_RESULT,
        'cxx_compiler': '`%s` is not a C++ compiler.'
                        % mozpath.abspath('/usr/bin/gcc'),
    }, environ={
        'CXX': 'gcc',
    })
def setUpClass(cls): class Config(object): pass cls.config = config = Config() config.topsrcdir = mozpath.abspath(os.curdir) config.topobjdir = mozpath.abspath("obj") config.external_source_dir = None
def test_exec_source_success(self):
    sandbox = self.sandbox()
    context = sandbox._context

    sandbox.exec_source("foo = True", mozpath.abspath("foo.py"))

    self.assertNotIn("foo", context)
    self.assertEqual(context.main_path, mozpath.abspath("foo.py"))
    self.assertEqual(context.all_paths, set([mozpath.abspath("foo.py")]))
def test_exec_compile_error(self):
    sandbox = self.sandbox()

    with self.assertRaises(SandboxExecutionError) as se:
        sandbox.exec_source("2f23;k;asfj", mozpath.abspath("foo.py"))

    self.assertEqual(se.exception.file_stack, [mozpath.abspath("foo.py")])
    self.assertIsInstance(se.exception.exc_value, SyntaxError)
    self.assertEqual(sandbox._context.main_path, mozpath.abspath("foo.py"))
def test_default_state(self):
    sandbox = self.sandbox()
    config = sandbox.config

    self.assertEqual(sandbox['TOPSRCDIR'], config.topsrcdir)
    self.assertEqual(sandbox['TOPOBJDIR'],
                     mozpath.abspath(config.topobjdir))
    self.assertEqual(sandbox['RELATIVEDIR'], '')
    self.assertEqual(sandbox['SRCDIR'], config.topsrcdir)
    self.assertEqual(sandbox['OBJDIR'],
                     mozpath.abspath(config.topobjdir).replace(os.sep, '/'))
def test_compiler_result(self):
    result = CompilerResult()
    self.assertEquals(result.__dict__, {
        'wrapper': [],
        'compiler': mozpath.abspath(''),
        'version': '',
        'type': '',
        'language': '',
        'flags': [],
    })

    result = CompilerResult(
        compiler='/usr/bin/gcc',
        version='4.2.1',
        type='gcc',
        language='C',
        flags=['-std=gnu99'],
    )
    self.assertEquals(result.__dict__, {
        'wrapper': [],
        'compiler': mozpath.abspath('/usr/bin/gcc'),
        'version': '4.2.1',
        'type': 'gcc',
        'language': 'C',
        'flags': ['-std=gnu99'],
    })

    result2 = result + {'flags': ['-m32']}
    self.assertEquals(result2.__dict__, {
        'wrapper': [],
        'compiler': mozpath.abspath('/usr/bin/gcc'),
        'version': '4.2.1',
        'type': 'gcc',
        'language': 'C',
        'flags': ['-std=gnu99', '-m32'],
    })
    # Original flags are untouched.
    self.assertEquals(result.flags, ['-std=gnu99'])

    result3 = result + {
        'compiler': '/usr/bin/gcc-4.7',
        'version': '4.7.3',
        'flags': ['-m32'],
    }
    self.assertEquals(result3.__dict__, {
        'wrapper': [],
        'compiler': mozpath.abspath('/usr/bin/gcc-4.7'),
        'version': '4.7.3',
        'type': 'gcc',
        'language': 'C',
        'flags': ['-std=gnu99', '-m32'],
    })
def test_imply_option_immediate_value(self):
    def get_config(*args):
        return self.get_config(
            *args, configure='imply_option/imm.configure')

    help, config = get_config(['--help'])
    self.assertEquals(config, {})

    config = get_config([])
    self.assertEquals(config, {})

    config_path = mozpath.abspath(
        mozpath.join(test_data_path, 'imply_option', 'imm.configure'))

    with self.assertRaisesRegexp(
            InvalidOptionError,
            "--enable-foo' implied by 'imply_option at %s:7' conflicts with "
            "'--disable-foo' from the command-line" % config_path):
        get_config(['--disable-foo'])

    with self.assertRaisesRegexp(
            InvalidOptionError,
            "--enable-bar=foo,bar' implied by 'imply_option at %s:16' conflicts"
            " with '--enable-bar=a,b,c' from the command-line" % config_path):
        get_config(['--enable-bar=a,b,c'])

    with self.assertRaisesRegexp(
            InvalidOptionError,
            "--enable-baz=BAZ' implied by 'imply_option at %s:25' conflicts"
            " with '--enable-baz=QUUX' from the command-line" % config_path):
        get_config(['--enable-baz=QUUX'])
def _get_files_info(self, paths):
    from mozpack.files import FileFinder

    # Normalize to relative from topsrcdir.
    relpaths = []
    for p in paths:
        a = mozpath.abspath(p)
        if not mozpath.basedir(a, [self.topsrcdir]):
            raise InvalidPathException('path is outside topsrcdir: %s' % p)

        relpaths.append(mozpath.relpath(a, self.topsrcdir))

    finder = FileFinder(self.topsrcdir, find_executables=False)

    # Expand wildcards.
    allpaths = []
    for p in relpaths:
        if '*' not in p:
            if p not in allpaths:
                allpaths.append(p)
            continue

        for path, f in finder.find(p):
            if path not in allpaths:
                allpaths.append(path)

    reader = self._get_reader()
    return reader.files_info(allpaths)
def include_file(self, path):
    '''Include one file in the sandbox. Users of this class probably want
    to use `run` instead.

    Note: this will execute all template invocations, as well as @depends
    functions that depend on '--help', but nothing else.
    '''
    if self._paths:
        path = mozpath.join(mozpath.dirname(self._paths[-1]), path)
        path = mozpath.normpath(path)
        if not mozpath.basedir(path, (mozpath.dirname(self._paths[0]),)):
            raise ConfigureError(
                'Cannot include `%s` because it is not in a subdirectory '
                'of `%s`' % (path, mozpath.dirname(self._paths[0])))
    else:
        path = mozpath.realpath(mozpath.abspath(path))
    if path in self._all_paths:
        raise ConfigureError(
            'Cannot include `%s` because it was included already.' % path)
    self._paths.append(path)
    self._all_paths.add(path)

    source = open(path, 'rb').read()

    code = compile(source, path, 'exec')

    exec_(code, self)

    self._paths.pop(-1)
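# A minimal sketch (not part of the snippet above) of the relative-include
# resolution that include_file performs; the file names are hypothetical and
# only meant to show when a ConfigureError would be raised.
import mozpack.path as mozpath

root = mozpath.realpath(mozpath.abspath('moz.configure'))  # first included file
nested = mozpath.join(mozpath.dirname(root), 'build/moz.configure/util.configure')
outside = mozpath.normpath(mozpath.join(mozpath.dirname(root), '../other.configure'))

# mozpath.basedir() returns the containing base dir, or None when the path
# escapes it; include_file rejects includes that escape the root directory.
print(mozpath.basedir(nested, (mozpath.dirname(root),)))   # allowed include
print(mozpath.basedir(outside, (mozpath.dirname(root),)))  # None -> ConfigureError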
def which(self, command, path=None):
    for parent in (path or self._search_path):
        c = mozpath.abspath(mozpath.join(parent, command))
        for candidate in (c, ensure_exe_extension(c)):
            if self.imported_os.path.exists(candidate):
                return candidate
    raise WhichError()
def isfile(self, path):
    path = mozpath.abspath(path)
    if path in self._paths:
        return True
    if mozpath.basedir(path, [topsrcdir, topobjdir]):
        return os.path.isfile(path)
    return False
def do_toolchain_test(self, paths, results, args=[], environ={}):
    '''Helper to test the toolchain checks from toolchain.configure.

    - `paths` is a dict associating compiler paths to FakeCompiler
      definitions from above.
    - `results` is a dict associating result variable names from
      toolchain.configure (c_compiler, cxx_compiler, host_c_compiler,
      host_cxx_compiler) with a result.
      The result can either be an error string, or a dict with the
      following items: flags, version, type, compiler, wrapper. (wrapper
      can be omitted when it's empty). Those items correspond to the
      attributes of the object returned by toolchain.configure checks
      and will be compared to them.
      When the results for host_c_compiler are identical to c_compiler,
      they can be omitted. Likewise for host_cxx_compiler vs.
      cxx_compiler.
    '''
    environ = dict(environ)
    if 'PATH' not in environ:
        environ['PATH'] = os.pathsep.join(
            mozpath.abspath(p) for p in ('/bin', '/usr/bin'))

    sandbox = self.get_sandbox(paths, {}, args, environ, logger=self.logger)

    for var in ('c_compiler', 'cxx_compiler', 'host_c_compiler',
                'host_cxx_compiler'):
        if var in results:
            result = results[var]
        elif var.startswith('host_'):
            result = results.get(var[5:], {})
        else:
            result = {}
        if isinstance(result, dict):
            result = dict(result)
            result.setdefault('wrapper', [])
            result['compiler'] = mozpath.abspath(result['compiler'])

        try:
            self.out.truncate(0)
            compiler = sandbox._value_for(sandbox[var])
            # Add var on both ends to make it clear which of the
            # variables is failing the test when that happens.
            self.assertEquals((var, compiler.__dict__), (var, result))
        except SystemExit:
            self.assertEquals((var, result),
                              (var, self.out.getvalue().strip()))
            return
def __init__(self, topsrcdir='/path/to/topsrcdir', extra_substs={}):
    self.topsrcdir = mozpath.abspath(topsrcdir)
    self.topobjdir = mozpath.abspath('/path/to/topobjdir')

    self.substs = ReadOnlyDict({
        'MOZ_FOO': 'foo',
        'MOZ_BAR': 'bar',
        'MOZ_TRUE': '1',
        'MOZ_FALSE': '',
    }, **extra_substs)

    self.substs_unicode = ReadOnlyDict({
        k.decode('utf-8'): v.decode('utf-8', 'replace')
        for k, v in self.substs.items()
    })

    self.defines = self.substs

    self.external_source_dir = None
def test_path_calculation(self):
    sandbox = self.sandbox('foo/bar/moz.build')
    config = sandbox.config

    self.assertEqual(sandbox['RELATIVEDIR'], 'foo/bar')
    self.assertEqual(sandbox['SRCDIR'],
                     '/'.join([config.topsrcdir, 'foo/bar']))
    self.assertEqual(sandbox['OBJDIR'],
                     mozpath.abspath('/'.join([config.topobjdir,
                                               'foo/bar'])).replace(os.sep, '/'))
def get_value_for(args=[], environ={}, mozconfig=""): sandbox = self.get_sandbox({}, {}, args, environ, mozconfig) # Add a fake old-configure option sandbox.option_impl("--with-foo", nargs="*", help="Help missing for old configure options") result = sandbox._value_for(sandbox["all_configure_options"]) shell = mozpath.abspath("/bin/sh") return result.replace("CONFIG_SHELL=%s " % shell, "")
def get_value_for(args=[], environ={}, mozconfig=''):
    sandbox = self.get_sandbox({}, {}, args, environ, mozconfig)

    # Add a fake old-configure option
    sandbox.option_impl('--with-foo', nargs='*',
                        help='Help missing for old configure options')

    result = sandbox._value_for(sandbox['all_configure_options'])
    shell = mozpath.abspath('/bin/sh')
    return result.replace('CONFIG_SHELL=%s ' % shell, '')
def main():
    if len(sys.argv) != 2:
        print('Usage: icu_sources_data.py <mozilla topsrcdir>',
              file=sys.stderr)
        sys.exit(1)

    topsrcdir = mozpath.abspath(sys.argv[1])
    update_sources(topsrcdir)
    if not update_data_file(topsrcdir):
        print('Error updating ICU data file', file=sys.stderr)
        sys.exit(1)
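# A usage sketch (not from the original source) showing how the entry point
# above is meant to be invoked, per its own usage string; the script location
# and the topsrcdir path are placeholders.
import subprocess
import sys

ret = subprocess.call(
    [sys.executable, 'icu_sources_data.py', '/path/to/mozilla-central'])
if ret != 0:
    sys.exit('updating ICU sources/data failed')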
def test_dirs(self):
    class Config(object):
        pass

    config = Config()
    config.topsrcdir = mozpath.abspath(os.curdir)
    config.topobjdir = mozpath.abspath('obj')

    test = Context(config=config)
    foo = mozpath.abspath('foo')
    test.push_source(foo)
    self.assertEqual(test.srcdir, config.topsrcdir)
    self.assertEqual(test.relsrcdir, '')
    self.assertEqual(test.objdir, config.topobjdir)
    self.assertEqual(test.relobjdir, '')

    foobar = os.path.abspath('foo/bar')
    test.push_source(foobar)
    self.assertEqual(test.srcdir, mozpath.join(config.topsrcdir, 'foo'))
    self.assertEqual(test.relsrcdir, 'foo')
    self.assertEqual(test.objdir, config.topobjdir)
    self.assertEqual(test.relobjdir, '')
def test_config_file_substitution(self): reader = self.reader("config-file-substitution") objs = self.read_topsrcdir(reader) self.assertEqual(len(objs), 2) self.assertIsInstance(objs[0], ConfigFileSubstitution) self.assertIsInstance(objs[1], ConfigFileSubstitution) topobjdir = mozpath.abspath(reader.config.topobjdir) self.assertEqual(objs[0].relpath, "foo") self.assertEqual(mozpath.normpath(objs[0].output_path), mozpath.normpath(mozpath.join(topobjdir, "foo"))) self.assertEqual(mozpath.normpath(objs[1].output_path), mozpath.normpath(mozpath.join(topobjdir, "bar")))
def __init__(self, topsrcdir="/path/to/topsrcdir", extra_substs={}): self.topsrcdir = mozpath.abspath(topsrcdir) self.topobjdir = mozpath.abspath("/path/to/topobjdir") self.substs = ReadOnlyDict( {"MOZ_FOO": "foo", "MOZ_BAR": "bar", "MOZ_TRUE": "1", "MOZ_FALSE": ""}, **extra_substs ) self.substs_unicode = ReadOnlyDict( {k.decode("utf-8"): v.decode("utf-8", "replace") for k, v in self.substs.items()} ) self.defines = self.substs self.external_source_dir = None self.lib_prefix = "lib" self.lib_suffix = ".a" self.import_prefix = "lib" self.import_suffix = ".so" self.dll_prefix = "lib" self.dll_suffix = ".so"
def _get_files_info(self, paths, rev=None):
    from mozbuild.frontend.reader import default_finder
    from mozpack.files import FileFinder, MercurialRevisionFinder

    # Normalize to relative from topsrcdir.
    relpaths = []
    for p in paths:
        a = mozpath.abspath(p)
        if not mozpath.basedir(a, [self.topsrcdir]):
            raise InvalidPathException('path is outside topsrcdir: %s' % p)

        relpaths.append(mozpath.relpath(a, self.topsrcdir))

    repo = None
    if rev:
        hg_path = os.path.join(self.topsrcdir, '.hg')
        if not os.path.exists(hg_path):
            raise InvalidPathException('a Mercurial repo is required '
                                       'when specifying a revision')

        repo = self.topsrcdir

    # We need two finders because the reader's finder operates on
    # absolute paths.
    finder = FileFinder(self.topsrcdir)
    if repo:
        reader_finder = MercurialRevisionFinder(repo, rev=rev,
                                                recognize_repo_paths=True)
    else:
        reader_finder = default_finder

    # Expand wildcards.
    # One variable is for ordering. The other for membership tests.
    # (Membership testing on a list can be slow.)
    allpaths = []
    all_paths_set = set()
    for p in relpaths:
        if '*' not in p:
            if p not in all_paths_set:
                all_paths_set.add(p)
                allpaths.append(p)
            continue

        if repo:
            raise InvalidPathException('cannot use wildcard in version control mode')

        for path, f in finder.find(p):
            if path not in all_paths_set:
                all_paths_set.add(path)
                allpaths.append(path)

    reader = self._get_reader(finder=reader_finder)
    return reader.files_info(allpaths)
def __init__(self, wrapper=None, compiler='', version='', type='',
             flags=None):
    if flags is None:
        flags = []
    if wrapper is None:
        wrapper = []
    super(CompilerResult, self).__init__(
        flags=flags,
        version=version,
        type=type,
        compiler=mozpath.abspath(compiler),
        wrapper=wrapper,
    )
def test_simple_program(self):
    def mock_simple_prog(_, args):
        if len(args) == 1 and args[0] == '--help':
            return 0, 'simple program help...', ''
        self.fail("Unexpected arguments to mock_simple_program: %s" % args)

    prog_path = mozpath.abspath('/simple/prog')
    cmd = "log.info(check_cmd_output('%s', '--help'))" % prog_path
    config, out, status = self.get_result(
        cmd, paths={prog_path: mock_simple_prog})
    self.assertEqual(config, {})
    self.assertEqual(status, 0)
    self.assertEqual(out, 'simple program help...\n')
def get_value_for(args=[], environ={}, mozconfig=''):
    sandbox = self.get_sandbox({}, {}, args, environ, mozconfig)

    # Add a fake old-configure option
    sandbox.option_impl('--with-foo', nargs='*',
                        help='Help missing for old configure options')

    # Remove all implied options, otherwise, getting
    # all_configure_options below triggers them, and that triggers
    # configure parts that aren't expected to run during this test.
    del sandbox._implied_options[:]
    result = sandbox._value_for(sandbox['all_configure_options'])
    shell = mozpath.abspath('/bin/sh')
    return result.replace('CONFIG_SHELL=%s ' % shell, '')
def test_config_file_substitution(self):
    reader = self.reader('config-file-substitution')
    objs = self.read_topsrcdir(reader)
    self.assertEqual(len(objs), 3)

    self.assertIsInstance(objs[0], DirectoryTraversal)
    self.assertIsInstance(objs[1], ConfigFileSubstitution)
    self.assertIsInstance(objs[2], ConfigFileSubstitution)

    topobjdir = mozpath.abspath(reader.config.topobjdir)
    self.assertEqual(objs[1].relpath, 'foo')
    self.assertEqual(mozpath.normpath(objs[1].output_path),
                     mozpath.normpath(mozpath.join(topobjdir, 'foo')))
    self.assertEqual(mozpath.normpath(objs[2].output_path),
                     mozpath.normpath(mozpath.join(topobjdir, 'bar')))
def iter_modules_in_path(*paths):
    normal_paths = [os.path.abspath(os.path.normcase(p)) + os.sep
                    for p in paths]
    for name, module in sys.modules.items():
        if not hasattr(module, '__file__'):
            continue

        path = module.__file__

        if path.endswith('.pyc'):
            path = path[:-1]
        normal_path = os.path.abspath(os.path.normcase(path))

        if any(normal_path.startswith(p) for p in normal_paths):
            yield mozpath.abspath(path)
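# A small usage sketch (not from the original source): iter_modules_in_path()
# yields the mozpath-normalized file of every already-imported module that
# lives under one of the given directories. The directory here is arbitrary.
import os

here = os.path.dirname(os.path.abspath(__file__))
for module_file in iter_modules_in_path(here):
    print(module_file)  # forward-slash paths to the modules' .py files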
def _get_files_info(self, paths, rev=None):
    reader = self.mozbuild_reader(config_mode='empty', vcs_revision=rev)

    # Normalize to relative from topsrcdir.
    relpaths = []
    for p in paths:
        a = mozpath.abspath(p)
        if not mozpath.basedir(a, [self.topsrcdir]):
            raise InvalidPathException('path is outside topsrcdir: %s' % p)

        relpaths.append(mozpath.relpath(a, self.topsrcdir))

    # Expand wildcards.
    # One variable is for ordering. The other for membership tests.
    # (Membership testing on a list can be slow.)
    allpaths = []
    all_paths_set = set()
    for p in relpaths:
        if '*' not in p:
            if p not in all_paths_set:
                if not os.path.exists(mozpath.join(self.topsrcdir, p)):
                    print('(%s does not exist; ignoring)' % p,
                          file=sys.stderr)
                    continue

                all_paths_set.add(p)
                allpaths.append(p)
            continue

        if rev:
            raise InvalidPathException('cannot use wildcard in version control mode')

        # finder is rooted at / for now.
        # TODO bug 1171069 tracks changing to relative.
        search = mozpath.join(self.topsrcdir, p)[1:]
        for path, f in reader.finder.find(search):
            path = path[len(self.topsrcdir):]
            if path not in all_paths_set:
                all_paths_set.add(path)
                allpaths.append(path)

    return reader.files_info(allpaths)
def read_from_gyp(config, path, output, vars, non_unified_sources=set()):
    """Read a gyp configuration and emit GypContexts for the backend to
    process.

    config is a ConfigEnvironment, path is the path to a root gyp
    configuration file, output is the base path under which the objdir for
    the various gyp dependencies will be, and vars is a dict of variables to
    pass to the gyp processor.
    """
    time_start = time.time()
    all_sources = set()

    # gyp expects plain str instead of unicode. The frontend code gives us
    # unicode strings, so convert them.
    path = encode(path)
    str_vars = dict((name, encode(value)) for name, value in vars.items())

    params = {
        b'parallel': False,
        b'generator_flags': {},
        b'build_files': [path],
    }

    # Files that gyp_chromium always includes
    includes = [encode(mozpath.join(script_dir, 'common.gypi'))]
    finder = FileFinder(chrome_src, find_executables=False)
    includes.extend(encode(mozpath.join(chrome_src, name))
                    for name, _ in finder.find('*/supplement.gypi'))

    # Read the given gyp file and its dependencies.
    generator, flat_list, targets, data = \
        gyp.Load([path], format=b'mozbuild',
                 default_variables=str_vars,
                 includes=includes,
                 depth=encode(mozpath.dirname(path)),
                 params=params)

    # Process all targets from the given gyp files and its dependencies.
    # The path given to AllTargets needs to use os.sep, while the frontend
    # code gives us paths normalized with forward slash separator.
    for target in gyp.common.AllTargets(flat_list, targets,
                                        path.replace(b'/', os.sep)):
        build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(
            target)

        # Each target is given its own objdir. The base of that objdir
        # is derived from the relative path from the root gyp file path
        # to the current build_file, placed under the given output
        # directory. Since several targets can be in a given build_file,
        # separate them in subdirectories using the build_file basename
        # and the target_name.
        reldir = mozpath.relpath(mozpath.dirname(build_file),
                                 mozpath.dirname(path))
        subdir = '%s_%s' % (
            mozpath.splitext(mozpath.basename(build_file))[0],
            target_name,
        )
        # Emit a context for each target.
        context = GypContext(config, mozpath.relpath(
            mozpath.join(output, reldir, subdir), config.topobjdir))
        context.add_source(mozpath.abspath(build_file))
        # The list of included files returned by gyp are relative to build_file
        for f in data[build_file]['included_files']:
            context.add_source(mozpath.abspath(
                mozpath.join(mozpath.dirname(build_file), f)))

        context['IS_GYP_DIR'] = True

        spec = targets[target]

        # Derive which gyp configuration to use based on MOZ_DEBUG.
        c = 'Debug' if config.substs['MOZ_DEBUG'] else 'Release'
        if c not in spec['configurations']:
            raise RuntimeError('Missing %s gyp configuration for target %s '
                               'in %s' % (c, target_name, build_file))
        target_conf = spec['configurations'][c]

        if spec['type'] == 'none':
            continue
        elif spec['type'] == 'static_library':
            # Remove leading 'lib' from the target_name if any, and use as
            # library name.
            name = spec['target_name']
            if name.startswith('lib'):
                name = name[3:]
            # The context expects an unicode string.
            context['LIBRARY_NAME'] = name.decode('utf-8')
            # gyp files contain headers and asm sources in sources lists.
            sources = set(mozpath.normpath(mozpath.join(context.srcdir, f))
                          for f in spec.get('sources', [])
                          if mozpath.splitext(f)[-1] != '.h')
            asm_sources = set(f for f in sources if f.endswith('.S'))

            unified_sources = sources - non_unified_sources - asm_sources
            sources -= unified_sources
            all_sources |= sources
            # The context expects alphabetical order when adding sources
            context['SOURCES'] = alphabetical_sorted(sources)
            context['UNIFIED_SOURCES'] = alphabetical_sorted(unified_sources)

            for define in target_conf.get('defines', []):
                if '=' in define:
                    name, value = define.split('=', 1)
                    context['DEFINES'][name] = value
                else:
                    context['DEFINES'][define] = True

            for include in target_conf.get('include_dirs', []):
                # moz.build expects all LOCAL_INCLUDES to exist, so ensure they do.
                #
                # NB: gyp files sometimes have actual absolute paths (e.g.
                # /usr/include32) and sometimes paths that moz.build considers
                # absolute, i.e. starting from topsrcdir. There's no good way
                # to tell them apart here, and the actual absolute paths are
                # likely bogus. In any event, actual absolute paths will be
                # filtered out by trying to find them in topsrcdir.
                if include.startswith('/'):
                    resolved = mozpath.abspath(mozpath.join(
                        config.topsrcdir, include[1:]))
                else:
                    resolved = mozpath.abspath(mozpath.join(
                        mozpath.dirname(build_file), include))
                if not os.path.exists(resolved):
                    continue
                context['LOCAL_INCLUDES'] += [include]

            context['EXTRA_ASSEMBLER_FLAGS'] = target_conf.get(
                'asflags_mozilla', [])
            context['EXTRA_COMPILE_FLAGS'] = target_conf.get(
                'cflags_mozilla', [])
        else:
            # Ignore other types than static_library because we don't have
            # anything using them, and we're not testing them. They can be
            # added when that becomes necessary.
            raise NotImplementedError('Unsupported gyp target type: %s' %
                                      spec['type'])

        # Add some features to all contexts. Put here in case LOCAL_INCLUDES
        # order matters.
        context['LOCAL_INCLUDES'] += [
            '/ipc/chromium/src',
            '/ipc/glue',
        ]
        context['GENERATED_INCLUDES'] += ['/ipc/ipdl/_ipdlheaders']
        # These get set via VC project file settings for normal GYP builds.
        if config.substs['OS_TARGET'] == 'WINNT':
            context['DEFINES']['UNICODE'] = True
            context['DEFINES']['_UNICODE'] = True
        context['DISABLE_STL_WRAPPING'] = True

        context.execution_time = time.time() - time_start
        yield context
        time_start = time.time()
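# A hedged sketch (not from the original source) of driving read_from_gyp()
# as its docstring describes: `config` is assumed to be a ConfigEnvironment,
# and the gyp root path, output directory, and variables are placeholders.
contexts = list(read_from_gyp(
    config,
    mozpath.join(config.topsrcdir, 'media/example/example.gyp'),  # placeholder gyp root
    mozpath.join(config.topobjdir, 'media/example'),              # objdir base for targets
    vars={'build_with_mozilla': 1},
    non_unified_sources=set(),
))
for ctx in contexts:
    # Each yielded GypContext describes one static library target.
    print(ctx['LIBRARY_NAME'], len(ctx['SOURCES']), len(ctx['UNIFIED_SOURCES']))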
def test_java_tool_checks_linux(self):
    def run_configure_java(mock_fs_paths, mock_java_home=None,
                           mock_path=None, args=[]):
        script = textwrap.dedent(
            """\
            @depends('--help')
            def host(_):
                return namespace(os='unknown')
            include('%(topsrcdir)s/build/moz.configure/java.configure')
            """
            % {"topsrcdir": topsrcdir}
        )

        def mock_which(exe, path=None):
            for mock_fs_path in mock_fs_paths.keys():
                (base, filename) = os.path.split(mock_fs_path)
                if filename == exe:
                    if path and os.path.normpath(base) != os.path.normpath(path):
                        continue
                    return mock_fs_path

        # Don't let system JAVA_HOME influence the test
        original_java_home = os.environ.pop("JAVA_HOME", None)
        configure_environ = {}

        if mock_java_home:
            os.environ["JAVA_HOME"] = mock_java_home
            configure_environ["JAVA_HOME"] = mock_java_home

        if mock_path:
            configure_environ["PATH"] = mock_path

        # * Don't attempt to invoke Java, just resolve each mock Java's version as "1.8"
        # * Even if the real file system has a symlink at the mocked path, don't let
        #   realpath follow it, as it may influence the test.
        # * When finding a binary, check the mock paths rather than the real filesystem.
        # Note: Python doesn't allow the different "with" bits to be put in parenthesis,
        # because then it thinks it's an un-with-able tuple. Additionally, if this is
        # cleanly lined up with "\", black removes them and autoformats them to the
        # block that is below.
        with patch("mozboot.util._resolve_java_version", return_value="1.8"), patch(
            "os.path.realpath", side_effect=lambda path: path
        ), patch("mozboot.util.which", side_effect=mock_which):
            result = self.get_result(
                args=args,
                command=script,
                extra_paths=paths,
                environ=configure_environ,
            )

        if original_java_home:
            os.environ["JAVA_HOME"] = original_java_home
        return result

    java = mozpath.abspath("/usr/bin/java")
    javac = mozpath.abspath("/usr/bin/javac")
    paths = {java: None, javac: None}

    config, out, status = run_configure_java(paths)
    self.assertEqual(status, 0)
    self.assertEqual(
        config,
        {
            "JAVA": java,
            "MOZ_JAVA_CODE_COVERAGE": False,
        },
    )
    self.assertEqual(
        out,
        textwrap.dedent(
            """\
            checking for java... %s
            """
            % java
        ),
    )

    # An alternative valid set of tools referred to by JAVA_HOME.
    alt_java = mozpath.abspath("/usr/local/bin/java")
    alt_javac = mozpath.abspath("/usr/local/bin/javac")
    alt_java_home = mozpath.dirname(mozpath.dirname(alt_java))
    paths = {alt_java: None, alt_javac: None, java: None, javac: None}

    alt_path = mozpath.dirname(java)
    config, out, status = run_configure_java(paths, alt_java_home, alt_path)
    self.assertEqual(status, 0)
    self.assertEqual(
        config,
        {
            "JAVA": alt_java,
            "MOZ_JAVA_CODE_COVERAGE": False,
        },
    )
    self.assertEqual(
        out,
        textwrap.dedent(
            """\
            checking for java... %s
            """
            % alt_java
        ),
    )

    # We can use --with-java-bin-path instead of JAVA_HOME to similar
    # effect.
    config, out, status = run_configure_java(
        paths,
        mock_path=mozpath.dirname(java),
        args=["--with-java-bin-path=%s" % mozpath.dirname(alt_java)],
    )
    self.assertEqual(status, 0)
    self.assertEqual(
        config,
        {
            "JAVA": alt_java,
            "MOZ_JAVA_CODE_COVERAGE": False,
        },
    )
    self.assertEqual(
        out,
        textwrap.dedent(
            """\
            checking for java... %s
            """
            % alt_java
        ),
    )

    # If --with-java-bin-path and JAVA_HOME are both set,
    # --with-java-bin-path takes precedence.
    config, out, status = run_configure_java(
        paths,
        mock_java_home=mozpath.dirname(mozpath.dirname(java)),
        mock_path=mozpath.dirname(java),
        args=["--with-java-bin-path=%s" % mozpath.dirname(alt_java)],
    )
    self.assertEqual(status, 0)
    self.assertEqual(
        config,
        {
            "JAVA": alt_java,
            "MOZ_JAVA_CODE_COVERAGE": False,
        },
    )
    self.assertEqual(
        out,
        textwrap.dedent(
            """\
            checking for java... %s
            """
            % alt_java
        ),
    )

    # --enable-java-coverage should set MOZ_JAVA_CODE_COVERAGE.
    alt_java_home = mozpath.dirname(mozpath.dirname(java))
    config, out, status = run_configure_java(
        paths,
        mock_java_home=alt_java_home,
        mock_path=mozpath.dirname(java),
        args=["--enable-java-coverage"],
    )
    self.assertEqual(status, 0)
    self.assertEqual(
        config,
        {
            "JAVA": java,
            "MOZ_JAVA_CODE_COVERAGE": True,
        },
    )

    # Any missing tool is fatal when these checks run.
    paths = {}
    config, out, status = run_configure_java(
        mock_fs_paths={},
        mock_path=mozpath.dirname(java),
        args=["--enable-java-coverage"],
    )
    self.assertEqual(status, 1)
    self.assertEqual(config, {})
    self.assertEqual(
        out,
        textwrap.dedent(
            """\
            ERROR: Could not find "java" on the $PATH. Please install the Java 1.8 JDK and/or set $JAVA_HOME.
            """
        ),
    )
def test_java_tool_checks(self):
    includes = ('util.configure', 'checks.configure', 'java.configure')

    def mock_valid_javac(_, args):
        if len(args) == 1 and args[0] == '-version':
            return 0, '1.8', ''
        self.fail("Unexpected arguments to mock_valid_javac: %s" % args)

    # A valid set of tools in a standard location.
    java = mozpath.abspath('/usr/bin/java')
    javah = mozpath.abspath('/usr/bin/javah')
    javac = mozpath.abspath('/usr/bin/javac')
    jar = mozpath.abspath('/usr/bin/jar')
    jarsigner = mozpath.abspath('/usr/bin/jarsigner')
    keytool = mozpath.abspath('/usr/bin/keytool')

    paths = {
        java: None,
        javah: None,
        javac: mock_valid_javac,
        jar: None,
        jarsigner: None,
        keytool: None,
    }

    config, out, status = self.get_result(includes=includes,
                                          extra_paths=paths)
    self.assertEqual(status, 0)
    self.assertEqual(config, {
        'JAVA': java,
        'JAVAH': javah,
        'JAVAC': javac,
        'JAR': jar,
        'JARSIGNER': jarsigner,
        'KEYTOOL': keytool,
    })
    self.assertEqual(out, textwrap.dedent('''\
        checking for java... %s
        checking for javah... %s
        checking for jar... %s
        checking for jarsigner... %s
        checking for keytool... %s
        checking for javac... %s
        checking for javac version... 1.8
    ''' % (java, javah, jar, jarsigner, keytool, javac)))

    # An alternative valid set of tools referred to by JAVA_HOME.
    alt_java = mozpath.abspath('/usr/local/bin/java')
    alt_javah = mozpath.abspath('/usr/local/bin/javah')
    alt_javac = mozpath.abspath('/usr/local/bin/javac')
    alt_jar = mozpath.abspath('/usr/local/bin/jar')
    alt_jarsigner = mozpath.abspath('/usr/local/bin/jarsigner')
    alt_keytool = mozpath.abspath('/usr/local/bin/keytool')
    alt_java_home = mozpath.dirname(mozpath.dirname(alt_java))

    paths.update({
        alt_java: None,
        alt_javah: None,
        alt_javac: mock_valid_javac,
        alt_jar: None,
        alt_jarsigner: None,
        alt_keytool: None,
    })

    config, out, status = self.get_result(
        includes=includes,
        extra_paths=paths,
        environ={
            'JAVA_HOME': alt_java_home,
            'PATH': mozpath.dirname(java),
        })
    self.assertEqual(status, 0)
    self.assertEqual(config, {
        'JAVA': alt_java,
        'JAVAH': alt_javah,
        'JAVAC': alt_javac,
        'JAR': alt_jar,
        'JARSIGNER': alt_jarsigner,
        'KEYTOOL': alt_keytool,
    })
    self.assertEqual(out, textwrap.dedent('''\
        checking for java... %s
        checking for javah... %s
        checking for jar... %s
        checking for jarsigner... %s
        checking for keytool... %s
        checking for javac... %s
        checking for javac version... 1.8
    ''' % (alt_java, alt_javah, alt_jar, alt_jarsigner, alt_keytool,
           alt_javac)))

    # We can use --with-java-bin-path instead of JAVA_HOME to similar
    # effect.
    config, out, status = self.get_result(
        args=['--with-java-bin-path=%s' % mozpath.dirname(alt_java)],
        includes=includes,
        extra_paths=paths,
        environ={'PATH': mozpath.dirname(java)})
    self.assertEqual(status, 0)
    self.assertEqual(config, {
        'JAVA': alt_java,
        'JAVAH': alt_javah,
        'JAVAC': alt_javac,
        'JAR': alt_jar,
        'JARSIGNER': alt_jarsigner,
        'KEYTOOL': alt_keytool,
    })
    self.assertEqual(out, textwrap.dedent('''\
        checking for java... %s
        checking for javah... %s
        checking for jar... %s
        checking for jarsigner... %s
        checking for keytool... %s
        checking for javac... %s
        checking for javac version... 1.8
    ''' % (alt_java, alt_javah, alt_jar, alt_jarsigner, alt_keytool,
           alt_javac)))

    # If --with-java-bin-path and JAVA_HOME are both set,
    # --with-java-bin-path takes precedence.
    config, out, status = self.get_result(
        args=['--with-java-bin-path=%s' % mozpath.dirname(alt_java)],
        includes=includes,
        extra_paths=paths,
        environ={
            'PATH': mozpath.dirname(java),
            'JAVA_HOME': mozpath.dirname(mozpath.dirname(java)),
        })
    self.assertEqual(status, 0)
    self.assertEqual(config, {
        'JAVA': alt_java,
        'JAVAH': alt_javah,
        'JAVAC': alt_javac,
        'JAR': alt_jar,
        'JARSIGNER': alt_jarsigner,
        'KEYTOOL': alt_keytool,
    })
    self.assertEqual(out, textwrap.dedent('''\
        checking for java... %s
        checking for javah... %s
        checking for jar... %s
        checking for jarsigner... %s
        checking for keytool... %s
        checking for javac... %s
        checking for javac version... 1.8
    ''' % (alt_java, alt_javah, alt_jar, alt_jarsigner, alt_keytool,
           alt_javac)))

    def mock_old_javac(_, args):
        if len(args) == 1 and args[0] == '-version':
            return 0, '1.6.9', ''
        self.fail("Unexpected arguments to mock_old_javac: %s" % args)

    # An old javac is fatal.
    paths[javac] = mock_old_javac
    config, out, status = self.get_result(
        includes=includes,
        extra_paths=paths,
        environ={'PATH': mozpath.dirname(java)})
    self.assertEqual(status, 1)
    self.assertEqual(config, {
        'JAVA': java,
        'JAVAH': javah,
        'JAVAC': javac,
        'JAR': jar,
        'JARSIGNER': jarsigner,
        'KEYTOOL': keytool,
    })
    self.assertEqual(out, textwrap.dedent('''\
        checking for java... %s
        checking for javah... %s
        checking for jar... %s
        checking for jarsigner... %s
        checking for keytool... %s
        checking for javac... %s
        checking for javac version... 
        ERROR: javac 1.8 or higher is required (found 1.6.9). Check the JAVA_HOME environment variable.
    ''' % (java, javah, jar, jarsigner, keytool, javac)))

    # Any missing tool is fatal when these checks run.
    del paths[jarsigner]
    config, out, status = self.get_result(
        includes=includes,
        extra_paths=paths,
        environ={'PATH': mozpath.dirname(java)})
    self.assertEqual(status, 1)
    self.assertEqual(config, {
        'JAVA': java,
        'JAVAH': javah,
        'JAR': jar,
        'JARSIGNER': ':',
    })
    self.assertEqual(out, textwrap.dedent('''\
        checking for java... %s
        checking for javah... %s
        checking for jar... %s
        checking for jarsigner... not found
        ERROR: The program jarsigner was not found. Set $JAVA_HOME to your Java SDK directory or use '--with-java-bin-path={java-bin-dir}'
    ''' % (java, javah, jar)))
def process_gn_config(gn_config, srcdir, config, output, non_unified_sources,
                      sandbox_vars, mozilla_flags):
    # Translates a json gn config into attributes that can be used to write
    # out moz.build files for this configuration.
    # Much of this code is based on similar functionality in `gyp_reader.py`.

    mozbuild_attrs = {'mozbuild_args': gn_config.get('mozbuild_args', None),
                      'dirs': {}}

    targets = gn_config["targets"]

    project_relsrcdir = mozpath.relpath(srcdir, config.topsrcdir)

    def target_info(fullname):
        path, name = target_fullname.split(':')
        # Stripping '//' gives us a path relative to the project root,
        # adding a suffix avoids name collisions with libraries already
        # in the tree (like "webrtc").
        return path.lstrip('//'), name + '_gn'

    # Process all targets from the given gn project and its dependencies.
    for target_fullname, spec in targets.iteritems():

        target_path, target_name = target_info(target_fullname)
        context_attrs = {}

        # Remove leading 'lib' from the target_name if any, and use as
        # library name.
        name = target_name
        if spec['type'] in ('static_library', 'shared_library', 'source_set'):
            if name.startswith('lib'):
                name = name[3:]
            context_attrs['LIBRARY_NAME'] = name.decode('utf-8')
        else:
            raise Exception('The following GN target type is not currently '
                            'consumed by moz.build: "%s". It may need to be '
                            'added, or you may need to re-run the '
                            '`GnConfigGen` step.' % spec['type'])

        if spec['type'] == 'shared_library':
            context_attrs['FORCE_SHARED_LIB'] = True

        sources = []
        unified_sources = []
        extensions = set()
        use_defines_in_asflags = False

        for f in spec.get('sources', []):
            f = f.lstrip("//")
            ext = mozpath.splitext(f)[-1]
            extensions.add(ext)
            src = '%s/%s' % (project_relsrcdir, f)
            if ext == '.h':
                continue
            elif ext == '.def':
                context_attrs['SYMBOLS_FILE'] = src
            elif ext != '.S' and src not in non_unified_sources:
                unified_sources.append('/%s' % src)
            else:
                sources.append('/%s' % src)
            # The Mozilla build system doesn't use DEFINES for building
            # ASFILES.
            if ext == '.s':
                use_defines_in_asflags = True

        context_attrs['SOURCES'] = sources
        context_attrs['UNIFIED_SOURCES'] = unified_sources

        context_attrs['DEFINES'] = {}
        for define in spec.get('defines', []):
            if '=' in define:
                name, value = define.split('=', 1)
                context_attrs['DEFINES'][name] = value
            else:
                context_attrs['DEFINES'][define] = True

        context_attrs['LOCAL_INCLUDES'] = []
        for include in spec.get('include_dirs', []):
            # GN will have resolved all these paths relative to the root of
            # the project indicated by "//".
            if include.startswith('//'):
                include = include[2:]
            # moz.build expects all LOCAL_INCLUDES to exist, so ensure they do.
            if include.startswith('/'):
                resolved = mozpath.abspath(
                    mozpath.join(config.topsrcdir, include[1:]))
            else:
                resolved = mozpath.abspath(mozpath.join(srcdir, include))
            if not os.path.exists(resolved):
                # GN files may refer to include dirs that are outside of the
                # tree or we simply didn't vendor. Print a warning in this case.
                if not resolved.endswith('gn-output/gen'):
                    print("Included path: '%s' does not exist, dropping "
                          "include from GN configuration." % resolved,
                          file=sys.stderr)
                continue
            if not include.startswith('/'):
                include = '/%s/%s' % (project_relsrcdir, include)
            context_attrs['LOCAL_INCLUDES'] += [include]

        context_attrs['ASFLAGS'] = spec.get('asflags_mozilla', [])
        if use_defines_in_asflags and defines:
            context_attrs['ASFLAGS'] += ['-D' + d for d in defines]

        flags = [f for f in spec.get('cflags', []) if f in mozilla_flags]
        if flags:
            suffix_map = {
                '.c': 'CFLAGS',
                '.cpp': 'CXXFLAGS',
                '.cc': 'CXXFLAGS',
                '.m': 'CMFLAGS',
                '.mm': 'CMMFLAGS',
            }
            variables = (suffix_map[e] for e in extensions if e in suffix_map)
            for var in variables:
                for f in flags:
                    # We may be getting make variable references out of the
                    # gn data, and we don't want those in emitted data, so
                    # substitute them with their actual value.
                    f = expand_variables(f, config.substs).split()
                    if not f:
                        continue
                    # the result may be a string or a list.
                    if isinstance(f, types.StringTypes):
                        context_attrs.setdefault(var, []).append(f)
                    else:
                        context_attrs.setdefault(var, []).extend(f)

        context_attrs['OS_LIBS'] = []
        for lib in spec.get('libs', []):
            lib_name = os.path.splitext(lib)[0]
            if lib.endswith('.framework'):
                context_attrs['OS_LIBS'] += ['-framework ' + lib_name]
            else:
                context_attrs['OS_LIBS'] += [lib_name]

        # Add some features to all contexts. Put here in case LOCAL_INCLUDES
        # order matters.
        context_attrs['LOCAL_INCLUDES'] += [
            '!/ipc/ipdl/_ipdlheaders',
            '/ipc/chromium/src',
            '/ipc/glue',
        ]

        # These get set via VC project file settings for normal GYP builds.
        # TODO: Determine if these defines are needed for GN builds.
        if gn_config['mozbuild_args']['OS_TARGET'] == 'WINNT':
            context_attrs['DEFINES']['UNICODE'] = True
            context_attrs['DEFINES']['_UNICODE'] = True

        context_attrs['COMPILE_FLAGS'] = {
            'OS_INCLUDES': [],
        }

        for key, value in sandbox_vars.items():
            if context_attrs.get(key) and isinstance(context_attrs[key], list):
                # If we have a key from sandbox_vars that's also been
                # populated here we use the value from sandbox_vars as our
                # basis rather than overriding outright.
                context_attrs[key] = value + context_attrs[key]
            elif context_attrs.get(key) and isinstance(context_attrs[key], dict):
                context_attrs[key].update(value)
            else:
                context_attrs[key] = value

        target_relsrcdir = mozpath.join(project_relsrcdir, target_path,
                                        target_name)
        mozbuild_attrs['dirs'][target_relsrcdir] = context_attrs

    return mozbuild_attrs
    memoize,
)

from .reader import SandboxValidationError


# Define this module as gyp.generator.mozbuild so that gyp can use it
# as a generator under the name "mozbuild".
sys.modules['gyp.generator.mozbuild'] = sys.modules[__name__]

# build/gyp_chromium does this:
#   script_dir = os.path.dirname(os.path.realpath(__file__))
#   chrome_src = os.path.abspath(os.path.join(script_dir, os.pardir))
#   sys.path.insert(0, os.path.join(chrome_src, 'tools', 'gyp', 'pylib'))
# We're not importing gyp_chromium, but we want both script_dir and
# chrome_src for the default includes, so go backwards from the pylib
# directory, which is the parent directory of gyp module.
chrome_src = mozpath.abspath(mozpath.join(mozpath.dirname(gyp.__file__),
                                          '../../../..'))
script_dir = mozpath.join(chrome_src, 'build')


def encode(value):
    if isinstance(value, unicode):
        return value.encode('utf-8')
    return value


# Default variables gyp uses when evaluating gyp files.
generator_default_variables = {}
for dirname in [
    b'INTERMEDIATE_DIR',
    b'SHARED_INTERMEDIATE_DIR',
    b'PRODUCT_DIR',
    b'LIB_DIR',
    b'SHARED_LIB_DIR',
]:
def android_geckoview_docs(
    self,
    command_context,
    archive,
    upload,
    upload_branch,
    javadoc_path,
    upload_message,
):
    tasks = (
        self.substs["GRADLE_ANDROID_GECKOVIEW_DOCS_ARCHIVE_TASKS"]
        if archive or upload
        else self.substs["GRADLE_ANDROID_GECKOVIEW_DOCS_TASKS"]
    )

    ret = self.gradle(command_context, tasks, verbose=True)
    if ret or not upload:
        return ret

    # Upload to Github.
    fmt = {
        "level": os.environ.get("MOZ_SCM_LEVEL", "0"),
        "project": os.environ.get("MH_BRANCH", "unknown"),
        "revision": os.environ.get("GECKO_HEAD_REV", "tip"),
    }
    env = {}

    # In order to push to GitHub from TaskCluster, we store a private key
    # in the TaskCluster secrets store in the format {"content": "<KEY>"},
    # and the corresponding public key as a writable deploy key for the
    # destination repo on GitHub.
    secret = os.environ.get("GECKOVIEW_DOCS_UPLOAD_SECRET", "").format(**fmt)
    if secret:
        # Set up a private key from the secrets store if applicable.
        import requests

        req = requests.get("http://taskcluster/secrets/v1/secret/" + secret)
        req.raise_for_status()

        keyfile = mozpath.abspath("gv-docs-upload-key")
        with open(keyfile, "w") as f:
            os.chmod(keyfile, 0o600)
            f.write(req.json()["secret"]["content"])

        # Turn off strict host key checking so ssh does not complain about
        # unknown github.com host. We're not pushing anything sensitive, so
        # it's okay to not check GitHub's host keys.
        env["GIT_SSH_COMMAND"] = 'ssh -i "%s" -o StrictHostKeyChecking=no' % keyfile

    # Clone remote repo.
    branch = upload_branch.format(**fmt)
    repo_url = "[email protected]:%s.git" % upload
    repo_path = mozpath.abspath("gv-docs-repo")
    self.run_process(
        [
            "git",
            "clone",
            "--branch",
            upload_branch,
            "--depth",
            "1",
            repo_url,
            repo_path,
        ],
        append_env=env,
        pass_thru=True,
    )
    env["GIT_DIR"] = mozpath.join(repo_path, ".git")
    env["GIT_WORK_TREE"] = repo_path
    env["GIT_AUTHOR_NAME"] = env["GIT_COMMITTER_NAME"] = "GeckoView Docs Bot"
    env["GIT_AUTHOR_EMAIL"] = env["GIT_COMMITTER_EMAIL"] = "*****@*****.**"

    # Copy over user documentation.
    import mozfile

    # Extract new javadoc to specified directory inside repo.
    src_tar = mozpath.join(
        self.topobjdir,
        "gradle",
        "build",
        "mobile",
        "android",
        "geckoview",
        "libs",
        "geckoview-javadoc.jar",
    )
    dst_path = mozpath.join(repo_path, javadoc_path.format(**fmt))
    mozfile.remove(dst_path)
    mozfile.extract_zip(src_tar, dst_path)

    # Commit and push.
    self.run_process(["git", "add", "--all"], append_env=env, pass_thru=True)
    if (
        self.run_process(
            ["git", "diff", "--cached", "--quiet"],
            append_env=env,
            pass_thru=True,
            ensure_exit_code=False,
        )
        != 0
    ):
        # We have something to commit.
        self.run_process(
            ["git", "commit", "--message", upload_message.format(**fmt)],
            append_env=env,
            pass_thru=True,
        )
        self.run_process(
            ["git", "push", "origin", branch], append_env=env, pass_thru=True
        )

    mozfile.remove(repo_path)
    if secret:
        mozfile.remove(keyfile)
    return 0
    UnifiedSources,
    VariablePassthru,
)
from mozbuild.frontend.emitter import TreeMetadataEmitter
from mozbuild.frontend.reader import (
    BuildReader,
    BuildReaderError,
    SandboxValidationError,
)

from mozbuild.test.common import MockConfig

import mozpack.path as mozpath


data_path = mozpath.abspath(mozpath.dirname(__file__))
data_path = mozpath.join(data_path, 'data')


class TestEmitterBasic(unittest.TestCase):
    def setUp(self):
        self._old_env = dict(os.environ)
        os.environ.pop('MOZ_OBJDIR', None)

    def tearDown(self):
        os.environ.clear()
        os.environ.update(self._old_env)

    def reader(self, name):
        config = MockConfig(mozpath.join(data_path, name), extra_substs=dict(
            ENABLE_TESTS='1',
def test_compiler_result(self):
    result = CompilerResult()
    self.assertEquals(
        result.__dict__,
        {
            "wrapper": [],
            "compiler": mozpath.abspath(""),
            "version": "",
            "type": "",
            "language": "",
            "flags": [],
        },
    )

    result = CompilerResult(
        compiler="/usr/bin/gcc",
        version="4.2.1",
        type="gcc",
        language="C",
        flags=["-std=gnu99"],
    )
    self.assertEquals(
        result.__dict__,
        {
            "wrapper": [],
            "compiler": mozpath.abspath("/usr/bin/gcc"),
            "version": "4.2.1",
            "type": "gcc",
            "language": "C",
            "flags": ["-std=gnu99"],
        },
    )

    result2 = result + {"flags": ["-m32"]}
    self.assertEquals(
        result2.__dict__,
        {
            "wrapper": [],
            "compiler": mozpath.abspath("/usr/bin/gcc"),
            "version": "4.2.1",
            "type": "gcc",
            "language": "C",
            "flags": ["-std=gnu99", "-m32"],
        },
    )
    # Original flags are untouched.
    self.assertEquals(result.flags, ["-std=gnu99"])

    result3 = result + {
        "compiler": "/usr/bin/gcc-4.7",
        "version": "4.7.3",
        "flags": ["-m32"],
    }
    self.assertEquals(
        result3.__dict__,
        {
            "wrapper": [],
            "compiler": mozpath.abspath("/usr/bin/gcc-4.7"),
            "version": "4.7.3",
            "type": "gcc",
            "language": "C",
            "flags": ["-std=gnu99", "-m32"],
        },
    )
class TestChecksConfigure(unittest.TestCase):
    def test_checking(self):
        out = StringIO()
        sandbox = ConfigureSandbox({}, stdout=out, stderr=out)
        base_dir = os.path.join(topsrcdir, 'build', 'moz.configure')
        sandbox.include_file(os.path.join(base_dir, 'checks.configure'))

        exec_(textwrap.dedent('''
            @checking('for a thing')
            def foo(value):
                return value
        '''), sandbox)

        foo = sandbox['foo']

        foo(True)
        self.assertEqual(out.getvalue(), 'checking for a thing... yes\n')

        out.truncate(0)
        foo(False)
        self.assertEqual(out.getvalue(), 'checking for a thing... no\n')

        out.truncate(0)
        foo(42)
        self.assertEqual(out.getvalue(), 'checking for a thing... 42\n')

        out.truncate(0)
        foo('foo')
        self.assertEqual(out.getvalue(), 'checking for a thing... foo\n')

        out.truncate(0)
        data = ['foo', 'bar']
        foo(data)
        self.assertEqual(out.getvalue(), 'checking for a thing... %r\n' % data)

        # When the function given to checking does nothing interesting, the
        # behavior is not altered
        exec_(textwrap.dedent('''
            @checking('for a thing', lambda x: x)
            def foo(value):
                return value
        '''), sandbox)

        foo = sandbox['foo']

        out.truncate(0)
        foo(True)
        self.assertEqual(out.getvalue(), 'checking for a thing... yes\n')

        out.truncate(0)
        foo(False)
        self.assertEqual(out.getvalue(), 'checking for a thing... no\n')

        out.truncate(0)
        foo(42)
        self.assertEqual(out.getvalue(), 'checking for a thing... 42\n')

        out.truncate(0)
        foo('foo')
        self.assertEqual(out.getvalue(), 'checking for a thing... foo\n')

        out.truncate(0)
        data = ['foo', 'bar']
        foo(data)
        self.assertEqual(out.getvalue(), 'checking for a thing... %r\n' % data)

        exec_(textwrap.dedent('''
            def munge(x):
                if not x:
                    return 'not found'
                if isinstance(x, (str, bool, int)):
                    return x
                return ' '.join(x)

            @checking('for a thing', munge)
            def foo(value):
                return value
        '''), sandbox)

        foo = sandbox['foo']

        out.truncate(0)
        foo(True)
        self.assertEqual(out.getvalue(), 'checking for a thing... yes\n')

        out.truncate(0)
        foo(False)
        self.assertEqual(out.getvalue(), 'checking for a thing... not found\n')

        out.truncate(0)
        foo(42)
        self.assertEqual(out.getvalue(), 'checking for a thing... 42\n')

        out.truncate(0)
        foo('foo')
        self.assertEqual(out.getvalue(), 'checking for a thing... foo\n')

        out.truncate(0)
        foo(['foo', 'bar'])
        self.assertEqual(out.getvalue(), 'checking for a thing... foo bar\n')

    KNOWN_A = mozpath.abspath('/usr/bin/known-a')
    KNOWN_B = mozpath.abspath('/usr/local/bin/known-b')
    KNOWN_C = mozpath.abspath('/home/user/bin/known c')

    def get_result(self, command='', args=[], environ={},
                   prog='/bin/configure'):
        config = {}
        out = StringIO()
        paths = {
            self.KNOWN_A: None,
            self.KNOWN_B: None,
            self.KNOWN_C: None,
        }
        environ = dict(environ)
        environ['PATH'] = os.pathsep.join(os.path.dirname(p) for p in paths)
        sandbox = ConfigureTestSandbox(paths, config, environ, [prog] + args,
                                       out, out)
        base_dir = os.path.join(topsrcdir, 'build', 'moz.configure')
        sandbox.include_file(os.path.join(base_dir, 'util.configure'))
        sandbox.include_file(os.path.join(base_dir, 'checks.configure'))

        status = 0
        try:
            exec_(command, sandbox)
            sandbox.run()
        except SystemExit as e:
            status = e.code

        return config, out.getvalue(), status

    def test_check_prog(self):
        config, out, status = self.get_result(
            'check_prog("FOO", ("known-a",))')
        self.assertEqual(status, 0)
        self.assertEqual(config, {'FOO': self.KNOWN_A})
        self.assertEqual(out, 'checking for foo... %s\n' % self.KNOWN_A)

        config, out, status = self.get_result(
            'check_prog("FOO", ("unknown", "known-b", "known c"))')
        self.assertEqual(status, 0)
        self.assertEqual(config, {'FOO': self.KNOWN_B})
        self.assertEqual(out, 'checking for foo... %s\n' % self.KNOWN_B)

        config, out, status = self.get_result(
            'check_prog("FOO", ("unknown", "unknown-2", "known c"))')
        self.assertEqual(status, 0)
        self.assertEqual(config, {'FOO': self.KNOWN_C})
        self.assertEqual(out, "checking for foo... '%s'\n" % self.KNOWN_C)

        config, out, status = self.get_result(
            'check_prog("FOO", ("unknown",))')
        self.assertEqual(status, 1)
        self.assertEqual(config, {})
        self.assertEqual(out, textwrap.dedent('''\
            checking for foo... not found
            DEBUG: foo: Trying unknown
            ERROR: Cannot find foo
        '''))

        config, out, status = self.get_result(
            'check_prog("FOO", ("unknown", "unknown-2", "unknown 3"))')
        self.assertEqual(status, 1)
        self.assertEqual(config, {})
        self.assertEqual(out, textwrap.dedent('''\
            checking for foo... not found
            DEBUG: foo: Trying unknown
            DEBUG: foo: Trying unknown-2
            DEBUG: foo: Trying 'unknown 3'
            ERROR: Cannot find foo
        '''))

        config, out, status = self.get_result(
            'check_prog("FOO", ("unknown", "unknown-2", "unknown 3"), '
            'allow_missing=True)')
        self.assertEqual(status, 0)
        self.assertEqual(config, {'FOO': ':'})
        self.assertEqual(out, 'checking for foo... not found\n')

    def test_check_prog_with_args(self):
        config, out, status = self.get_result(
            'check_prog("FOO", ("unknown", "known-b", "known c"))',
            ['FOO=known-a'])
        self.assertEqual(status, 0)
        self.assertEqual(config, {'FOO': self.KNOWN_A})
        self.assertEqual(out, 'checking for foo... %s\n' % self.KNOWN_A)

        config, out, status = self.get_result(
            'check_prog("FOO", ("unknown", "known-b", "known c"))',
            ['FOO=%s' % self.KNOWN_A])
        self.assertEqual(status, 0)
        self.assertEqual(config, {'FOO': self.KNOWN_A})
        self.assertEqual(out, 'checking for foo... %s\n' % self.KNOWN_A)

        path = self.KNOWN_B.replace('known-b', 'known-a')
        config, out, status = self.get_result(
            'check_prog("FOO", ("unknown", "known-b", "known c"))',
            ['FOO=%s' % path])
        self.assertEqual(status, 1)
        self.assertEqual(config, {})
        self.assertEqual(out, textwrap.dedent('''\
            checking for foo... not found
            DEBUG: foo: Trying %s
            ERROR: Cannot find foo
        ''') % path)

        config, out, status = self.get_result(
            'check_prog("FOO", ("unknown",))',
            ['FOO=known c'])
        self.assertEqual(status, 0)
        self.assertEqual(config, {'FOO': self.KNOWN_C})
        self.assertEqual(out, "checking for foo... '%s'\n" % self.KNOWN_C)

        config, out, status = self.get_result(
            'check_prog("FOO", ("unknown", "unknown-2", "unknown 3"), '
            'allow_missing=True)',
            ['FOO=unknown'])
        self.assertEqual(status, 1)
        self.assertEqual(config, {})
        self.assertEqual(out, textwrap.dedent('''\
            checking for foo... not found
            DEBUG: foo: Trying unknown
            ERROR: Cannot find foo
        '''))

    def test_check_prog_what(self):
        config, out, status = self.get_result(
            'check_prog("CC", ("known-a",), what="the target C compiler")')
        self.assertEqual(status, 0)
        self.assertEqual(config, {'CC': self.KNOWN_A})
        self.assertEqual(
            out, 'checking for the target C compiler... %s\n' % self.KNOWN_A)

        config, out, status = self.get_result(
            'check_prog("CC", ("unknown", "unknown-2", "unknown 3"),'
            ' what="the target C compiler")')
        self.assertEqual(status, 1)
        self.assertEqual(config, {})
        self.assertEqual(out, textwrap.dedent('''\
            checking for the target C compiler... not found
            DEBUG: cc: Trying unknown
            DEBUG: cc: Trying unknown-2
            DEBUG: cc: Trying 'unknown 3'
            ERROR: Cannot find the target C compiler
        '''))

    def test_check_prog_input(self):
        config, out, status = self.get_result(textwrap.dedent('''
            option("--with-ccache", nargs=1, help="ccache")
            check_prog("CCACHE", ("known-a",), input="--with-ccache")
        '''), ['--with-ccache=known-b'])
        self.assertEqual(status, 0)
        self.assertEqual(config, {'CCACHE': self.KNOWN_B})
        self.assertEqual(out, 'checking for ccache... %s\n' % self.KNOWN_B)

        script = textwrap.dedent('''
            option(env="CC", nargs=1, help="compiler")
            @depends("CC")
            def compiler(value):
                return value[0].split()[0] if value else None
            check_prog("CC", ("known-a",), input=compiler)
        ''')
        config, out, status = self.get_result(script)
        self.assertEqual(status, 0)
        self.assertEqual(config, {'CC': self.KNOWN_A})
        self.assertEqual(out, 'checking for cc... %s\n' % self.KNOWN_A)

        config, out, status = self.get_result(script, ['CC=known-b'])
        self.assertEqual(status, 0)
        self.assertEqual(config, {'CC': self.KNOWN_B})
        self.assertEqual(out, 'checking for cc... %s\n' % self.KNOWN_B)

        config, out, status = self.get_result(script, ['CC=known-b -m32'])
        self.assertEqual(status, 0)
        self.assertEqual(config, {'CC': self.KNOWN_B})
        self.assertEqual(out, 'checking for cc... %s\n' % self.KNOWN_B)

    def test_check_prog_progs(self):
        config, out, status = self.get_result('check_prog("FOO", ())')
        self.assertEqual(status, 0)
        self.assertEqual(config, {})
        self.assertEqual(out, '')

        config, out, status = self.get_result('check_prog("FOO", ())',
                                              ['FOO=known-a'])
        self.assertEqual(status, 0)
        self.assertEqual(config, {'FOO': self.KNOWN_A})
        self.assertEqual(out, 'checking for foo... %s\n' % self.KNOWN_A)

        script = textwrap.dedent('''
            option(env="TARGET", nargs=1, default="linux", help="target")
            @depends("TARGET")
            def compiler(value):
                if value:
                    if value[0] == "linux":
                        return ("gcc", "clang")
                    if value[0] == "winnt":
                        return ("cl", "clang-cl")
            check_prog("CC", compiler)
        ''')
        config, out, status = self.get_result(script)
        self.assertEqual(status, 1)
        self.assertEqual(config, {})
        self.assertEqual(out, textwrap.dedent('''\
            checking for cc... not found
            DEBUG: cc: Trying gcc
            DEBUG: cc: Trying clang
            ERROR: Cannot find cc
        '''))

        config, out, status = self.get_result(script, ['TARGET=linux'])
        self.assertEqual(status, 1)
        self.assertEqual(config, {})
        self.assertEqual(out, textwrap.dedent('''\
            checking for cc... not found
            DEBUG: cc: Trying gcc
            DEBUG: cc: Trying clang
            ERROR: Cannot find cc
        '''))

        config, out, status = self.get_result(script, ['TARGET=winnt'])
        self.assertEqual(status, 1)
        self.assertEqual(config, {})
        self.assertEqual(out, textwrap.dedent('''\
            checking for cc... not found
            DEBUG: cc: Trying cl
            DEBUG: cc: Trying clang-cl
            ERROR: Cannot find cc
        '''))

        config, out, status = self.get_result(script, ['TARGET=none'])
        self.assertEqual(status, 0)
        self.assertEqual(config, {})
        self.assertEqual(out, '')

        config, out, status = self.get_result(script, ['TARGET=winnt',
                                                       'CC=known-a'])
        self.assertEqual(status, 0)
        self.assertEqual(config, {'CC': self.KNOWN_A})
        self.assertEqual(out, 'checking for cc... %s\n' % self.KNOWN_A)

        config, out, status = self.get_result(script, ['TARGET=none',
                                                       'CC=known-a'])
        self.assertEqual(status, 0)
        self.assertEqual(config, {'CC': self.KNOWN_A})
        self.assertEqual(out, 'checking for cc... %s\n' % self.KNOWN_A)

    def test_check_prog_configure_error(self):
        with self.assertRaises(ConfigureError) as e:
            self.get_result('check_prog("FOO", "foo")')

        self.assertEqual(e.exception.message,
                         'progs must resolve to a list or tuple!')

        with self.assertRaises(ConfigureError) as e:
            self.get_result('foo = depends("--help")(lambda h: ("a", "b"))\n'
                            'check_prog("FOO", ("known-a",), input=foo)')

        self.assertEqual(e.exception.message,
                         'input must resolve to a tuple or a list with a '
                         'single element, or a string')

        with self.assertRaises(ConfigureError) as e:
            self.get_result('foo = depends("--help")(lambda h: {"a": "b"})\n'
                            'check_prog("FOO", ("known-a",), input=foo)')

        self.assertEqual(e.exception.message,
                         'input must resolve to a tuple or a list with a '
                         'single element, or a string')
def test_java_tool_checks(self): # A valid set of tools in a standard location. java = mozpath.abspath('/usr/bin/java') jarsigner = mozpath.abspath('/usr/bin/jarsigner') keytool = mozpath.abspath('/usr/bin/keytool') paths = { java: None, jarsigner: None, keytool: None, } script = textwrap.dedent('''\ @depends('--help') def host(_): return namespace(os='unknown') include('%(topsrcdir)s/build/moz.configure/java.configure') ''' % {'topsrcdir': topsrcdir}) config, out, status = self.get_result(command=script, extra_paths=paths) self.assertEqual(status, 0) self.assertEqual(config, { 'JAVA': java, 'JARSIGNER': jarsigner, 'KEYTOOL': keytool, 'MOZ_JAVA_CODE_COVERAGE': False, }) self.assertEqual(out, textwrap.dedent('''\ checking for java... %s checking for jarsigner... %s checking for keytool... %s ''' % (java, jarsigner, keytool))) # An alternative valid set of tools referred to by JAVA_HOME. alt_java = mozpath.abspath('/usr/local/bin/java') alt_jarsigner = mozpath.abspath('/usr/local/bin/jarsigner') alt_keytool = mozpath.abspath('/usr/local/bin/keytool') alt_java_home = mozpath.dirname(mozpath.dirname(alt_java)) paths.update({ alt_java: None, alt_jarsigner: None, alt_keytool: None, }) config, out, status = self.get_result(command=script, extra_paths=paths, environ={ 'JAVA_HOME': alt_java_home, 'PATH': mozpath.dirname(java) }) self.assertEqual(status, 0) self.assertEqual(config, { 'JAVA': alt_java, 'JARSIGNER': alt_jarsigner, 'KEYTOOL': alt_keytool, 'MOZ_JAVA_CODE_COVERAGE': False, }) self.assertEqual(out, textwrap.dedent('''\ checking for java... %s checking for jarsigner... %s checking for keytool... %s ''' % (alt_java, alt_jarsigner, alt_keytool))) # We can use --with-java-bin-path instead of JAVA_HOME to similar # effect. config, out, status = self.get_result( command=script, args=['--with-java-bin-path=%s' % mozpath.dirname(alt_java)], extra_paths=paths, environ={ 'PATH': mozpath.dirname(java) }) self.assertEqual(status, 0) self.assertEqual(config, { 'JAVA': alt_java, 'JARSIGNER': alt_jarsigner, 'KEYTOOL': alt_keytool, 'MOZ_JAVA_CODE_COVERAGE': False, }) self.assertEqual(out, textwrap.dedent('''\ checking for java... %s checking for jarsigner... %s checking for keytool... %s ''' % (alt_java, alt_jarsigner, alt_keytool))) # If --with-java-bin-path and JAVA_HOME are both set, # --with-java-bin-path takes precedence. config, out, status = self.get_result( command=script, args=['--with-java-bin-path=%s' % mozpath.dirname(alt_java)], extra_paths=paths, environ={ 'PATH': mozpath.dirname(java), 'JAVA_HOME': mozpath.dirname(mozpath.dirname(java)), }) self.assertEqual(status, 0) self.assertEqual(config, { 'JAVA': alt_java, 'JARSIGNER': alt_jarsigner, 'KEYTOOL': alt_keytool, 'MOZ_JAVA_CODE_COVERAGE': False, }) self.assertEqual(out, textwrap.dedent('''\ checking for java... %s checking for jarsigner... %s checking for keytool... %s ''' % (alt_java, alt_jarsigner, alt_keytool))) # --enable-java-coverage should set MOZ_JAVA_CODE_COVERAGE. config, out, status = self.get_result( command=script, args=['--enable-java-coverage'], extra_paths=paths, environ={ 'PATH': mozpath.dirname(java), 'JAVA_HOME': mozpath.dirname(mozpath.dirname(java)), }) self.assertEqual(status, 0) self.assertEqual(config, { 'JAVA': java, 'JARSIGNER': jarsigner, 'KEYTOOL': keytool, 'MOZ_JAVA_CODE_COVERAGE': True, }) # Any missing tool is fatal when these checks run. 
del paths[jarsigner] config, out, status = self.get_result(command=script, extra_paths=paths, environ={ 'PATH': mozpath.dirname(java) }) self.assertEqual(status, 1) self.assertEqual(config, { 'JAVA': java, 'JARSIGNER': ':', }) self.assertEqual(out, textwrap.dedent('''\ checking for java... %s checking for jarsigner... not found ERROR: The program jarsigner was not found. Set $JAVA_HOME to your Java SDK directory or use '--with-java-bin-path={java-bin-dir}' ''' % (java)))
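# A rough sketch of the precedence these checks exercise (the helper below is assumed,
# not the real java.configure logic): --with-java-bin-path is consulted first, then
# $JAVA_HOME/bin, then $PATH.
import os


def java_search_dirs(java_bin_path=None, java_home=None, path=''):
    dirs = []
    if java_bin_path:
        dirs.append(java_bin_path)
    if java_home:
        dirs.append(os.path.join(java_home, 'bin'))
    dirs.extend(p for p in path.split(os.pathsep) if p)
    return dirs


# --with-java-bin-path wins over JAVA_HOME, which wins over PATH, matching the
# expectations asserted above.
print(java_search_dirs('/usr/local/bin', '/opt/jdk', '/usr/bin'))
# e.g. ['/usr/local/bin', '/opt/jdk/bin', '/usr/bin'] on a POSIX host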
def __init__( self, topsrcdir, topobjdir, defines=None, substs=None, source=None, mozconfig=None, ): if not source: source = mozpath.join(topobjdir, "config.status") self.source = source self.defines = ReadOnlyDict(defines or {}) self.substs = dict(substs or {}) self.topsrcdir = mozpath.abspath(topsrcdir) self.topobjdir = mozpath.abspath(topobjdir) self.mozconfig = mozpath.abspath(mozconfig) if mozconfig else None self.lib_prefix = self.substs.get("LIB_PREFIX", "") if "LIB_SUFFIX" in self.substs: self.lib_suffix = ".%s" % self.substs["LIB_SUFFIX"] self.dll_prefix = self.substs.get("DLL_PREFIX", "") self.dll_suffix = self.substs.get("DLL_SUFFIX", "") self.host_dll_prefix = self.substs.get("HOST_DLL_PREFIX", "") self.host_dll_suffix = self.substs.get("HOST_DLL_SUFFIX", "") if self.substs.get("IMPORT_LIB_SUFFIX"): self.import_prefix = self.lib_prefix self.import_suffix = ".%s" % self.substs["IMPORT_LIB_SUFFIX"] else: self.import_prefix = self.dll_prefix self.import_suffix = self.dll_suffix self.bin_suffix = self.substs.get("BIN_SUFFIX", "") global_defines = [name for name in self.defines] self.substs["ACDEFINES"] = " ".join( [ "-D%s=%s" % (name, shell_quote(self.defines[name]).replace("$", "$$")) for name in sorted(global_defines) ] ) def serialize(name, obj): if isinstance(obj, six.string_types): return obj if isinstance(obj, Iterable): return " ".join(obj) raise Exception("Unhandled type %s for %s" % (type(obj), name)) self.substs["ALLSUBSTS"] = "\n".join( sorted( [ "%s = %s" % (name, serialize(name, self.substs[name])) for name in self.substs if self.substs[name] ] ) ) self.substs["ALLEMPTYSUBSTS"] = "\n".join( sorted(["%s =" % name for name in self.substs if not self.substs[name]]) ) self.substs = ReadOnlyDict(self.substs)
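# A worked example of the derived substitutions computed above (a simplified sketch:
# shell_quote and the "$" -> "$$" escaping are omitted, and plain dicts stand in for
# ReadOnlyDict).
defines = {"MOZ_FOO": "1", "DEBUG": ""}
substs = {"MOZ_BAR": "bar", "EMPTY": "", "LIST": ["a", "b"]}

acdefines = " ".join("-D%s=%s" % (name, defines[name]) for name in sorted(defines))
allsubsts = "\n".join(sorted(
    "%s = %s" % (k, " ".join(v) if isinstance(v, (list, tuple)) else v)
    for k, v in substs.items() if v))
allemptysubsts = "\n".join(sorted("%s =" % k for k, v in substs.items() if not v))

print(acdefines)       # -DDEBUG= -DMOZ_FOO=1
print(allsubsts)       # LIST = a b
                       # MOZ_BAR = bar
print(allemptysubsts)  # EMPTY =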
def shell(self, stdin, args): script = mozpath.abspath(args[0]) if script in self._subprocess_paths: return self._subprocess_paths[script](stdin, args[1:]) return 127, '', 'File not found'
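# A minimal sketch of the same dispatch pattern in isolation (class and handler names
# here are made up): a fake shell resolves the script path against a registry of
# callables and falls back to the conventional (127, '', 'File not found') triple.
import os


class FakeShell(object):
    def __init__(self):
        self._subprocess_paths = {}

    def register(self, path, handler):
        self._subprocess_paths[os.path.abspath(path)] = handler

    def shell(self, stdin, args):
        script = os.path.abspath(args[0])
        if script in self._subprocess_paths:
            return self._subprocess_paths[script](stdin, args[1:])
        return 127, '', 'File not found'


sh = FakeShell()
sh.register('/usr/bin/pkg-config', lambda stdin, args: (0, '0.29.2', ''))
print(sh.shell('', ['/usr/bin/pkg-config', '--version']))  # (0, '0.29.2', '')
print(sh.shell('', ['/usr/bin/missing']))                  # (127, '', 'File not found')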
def setUpClass(cls): class Config(object): pass cls.config = config = Config() config.topsrcdir = mozpath.abspath(os.curdir) config.topobjdir = mozpath.abspath('obj') config.external_source_dir = None
def repackage_msix( dir_or_package, channel=None, branding=None, template=None, distribution_dirs=[], locale_allowlist=set(), version=None, vendor=None, displayname=None, app_name="firefox", identity=None, publisher=None, publisher_display_name="Mozilla Corporation", arch=None, output=None, force=False, log=None, verbose=False, makeappx=None, ): if not channel: raise Exception("channel is required") if channel not in ["official", "beta", "aurora", "nightly", "unofficial"]: raise Exception("channel is unrecognized: {}".format(channel)) if not branding: raise Exception("branding dir is required") if not os.path.isdir(branding): raise Exception("branding dir {} does not exist".format(branding)) # TODO: maybe we can fish this from the package directly? Maybe from a DLL, # maybe from application.ini? if arch is None or arch not in _MSIX_ARCH.keys(): raise Exception( "arch name must be provided and one of {}.".format(_MSIX_ARCH.keys()) ) if not os.path.exists(dir_or_package): raise Exception("{} does not exist".format(dir_or_package)) if ( os.path.isfile(dir_or_package) and os.path.splitext(dir_or_package)[1] == ".msix" ): # The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE. msix_dir = mozpath.normsep( mozpath.join( get_state_dir(), "cache", "mach-msix", "msix-unpack", ) ) if os.path.exists(msix_dir): shutil.rmtree(msix_dir) ensureParentDir(msix_dir) dir_or_package = unpack_msix(dir_or_package, msix_dir, log=log, verbose=verbose) log( logging.INFO, "msix", { "input": dir_or_package, }, "Adding files from '{input}'", ) if os.path.isdir(dir_or_package): finder = FileFinder(dir_or_package) else: finder = JarFinder(dir_or_package, JarReader(dir_or_package)) values = get_application_ini_values( finder, dict(section="App", value="CodeName", fallback="Name"), dict(section="App", value="Vendor"), ) first = next(values) displayname = displayname or "Mozilla {}".format(first) second = next(values) vendor = vendor or second # For `AppConstants.jsm` and `brand.properties`, which are in the omnijar in packaged builds. # The nested langpack XPI files can't be read by `mozjar.py`. unpack_finder = UnpackFinder(finder, unpack_xpi=False) if not version: values = get_appconstants_jsm_values( unpack_finder, "MOZ_APP_VERSION_DISPLAY", "MOZ_BUILDID" ) display_version = next(values) buildid = next(values) version = get_embedded_version(display_version, buildid) log( logging.INFO, "msix", { "version": version, "display_version": display_version, "buildid": buildid, }, "AppConstants.jsm display version is '{display_version}' and build ID is '{buildid}':" + " embedded version will be '{version}'", ) # TODO: Bug 1721922: localize this description via Fluent. lines = [] for _, f in unpack_finder.find("**/chrome/en-US/locale/branding/brand.properties"): lines.extend( line for line in f.open().read().decode("utf-8").splitlines() if "brandFullName" in line ) (brandFullName,) = lines # We expect exactly one definition. _, _, brandFullName = brandFullName.partition("=") brandFullName = brandFullName.strip() # We don't have a build at repackage-time to gives us this value, and the # source of truth is a branding-specific `configure.sh` shell script that we # can't easily evaluate completely here. Instead, we take the last value # from `configure.sh`. 
lines = [ line for line in open(mozpath.join(branding, "configure.sh")).readlines() if "MOZ_IGECKOBACKCHANNEL_IID" in line ] MOZ_IGECKOBACKCHANNEL_IID = lines[-1] _, _, MOZ_IGECKOBACKCHANNEL_IID = MOZ_IGECKOBACKCHANNEL_IID.partition("=") MOZ_IGECKOBACKCHANNEL_IID = MOZ_IGECKOBACKCHANNEL_IID.strip() if MOZ_IGECKOBACKCHANNEL_IID.startswith(('"', "'")): MOZ_IGECKOBACKCHANNEL_IID = MOZ_IGECKOBACKCHANNEL_IID[1:-1] # The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE. output_dir = mozpath.normsep( mozpath.join( get_state_dir(), "cache", "mach-msix", "msix-temp-{}".format(channel) ) ) if channel == "beta": # Release (official) and Beta share branding. Differentiate Beta a little bit. displayname += " Beta" brandFullName += " Beta" # Like 'Firefox Package Root', 'Firefox Nightly Package Root', 'Firefox Beta # Package Root'. This is `BrandFullName` in the installer, and we want to # be close but to not match. By not matching, we hope to prevent confusion # and/or errors between regularly installed builds and App Package builds. instdir = "{} Package Root".format(displayname) # The standard package name is like "CompanyNoSpaces.ProductNoSpaces". identity = identity or "{}.{}".format(vendor, displayname).replace(" ", "") # We might want to include the publisher ID hash here. I.e., # "__{publisherID}". My locally produced MSIX was named like # `Mozilla.MozillaFirefoxNightly_89.0.0.0_x64__4gf61r4q480j0`, suggesting also a # missing field, but it's not necessary, since this is just an output file name. package_output_name = "{identity}_{version}_{arch}".format( identity=identity, version=version, arch=_MSIX_ARCH[arch] ) # The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE. default_output = mozpath.normsep( mozpath.join( get_state_dir(), "cache", "mach-msix", "{}.msix".format(package_output_name) ) ) output = output or default_output log(logging.INFO, "msix", {"output": output}, "Repackaging to: {output}") m = InstallManifest() m.add_copy(mozpath.join(template, "Resources.pri"), "Resources.pri") m.add_pattern_copy(mozpath.join(branding, "msix", "Assets"), "**", "Assets") m.add_pattern_copy(mozpath.join(template, "VFS"), "**", "VFS") copier = FileCopier() # TODO: Bug 1710147: filter out MSVCRT files and use a dependency instead. for p, f in finder: if not os.path.isdir(dir_or_package): # In archived builds, `p` is like "firefox/firefox.exe"; we want just "firefox.exe". pp = os.path.relpath(p, "firefox") else: # In local builds and unpacked MSIX directories, `p` is like "firefox.exe" already. pp = p if pp.startswith("distribution"): # Treat any existing distribution as a distribution directory, # potentially with language packs. This makes it easy to repack # unpacked MSIXes. distribution_dir = mozpath.join(dir_or_package, "distribution") if distribution_dir not in distribution_dirs: distribution_dirs.append(distribution_dir) continue copier.add(mozpath.normsep(mozpath.join("VFS", "ProgramFiles", instdir, pp)), f) # Locales to declare as supported in `AppxManifest.xml`. locales = set(["en-US"]) for distribution_dir in [ mozpath.join(template, "distribution") ] + distribution_dirs: log( logging.INFO, "msix", {"dir": distribution_dir}, "Adding distribution files from {dir}", ) # In automation, we have no easy way to remap the names of artifacts fetched from dependent # tasks. In particular, langpacks will be named like `target.langpack.xpi`. The fetch # tasks do allow us to put them in a per-locale directory, so that the entire set can be # fetched. Here we remap the names. 
finder = FileFinder(distribution_dir) for p, f in finder: locale = None if os.path.basename(p) == "target.langpack.xpi": # Turn "/path/to/LOCALE/target.langpack.xpi" into "LOCALE". This is how langpacks # are presented in CI. base, locale = os.path.split(os.path.dirname(p)) # Like "locale-LOCALE/[email protected]". This is what AMO # serves and how flatpak builds name langpacks, but not how snap builds name # langpacks. I can't explain the discrepancy. dest = mozpath.normsep( mozpath.join( base, f"locale-{locale}", f"langpack-{locale}@firefox.mozilla.org.xpi", ) ) log( logging.DEBUG, "msix", {"path": p, "dest": dest}, "Renaming langpack {path} to {dest}", ) elif os.path.basename(p).startswith("langpack-"): # Turn "/path/to/[email protected]" into "LOCALE". This is # how langpacks are presented from an unpacked MSIX. _, _, locale = os.path.basename(p).partition("langpack-") locale, _, _ = locale.partition("@") dest = p else: dest = p if locale: locale = locale.strip().lower() locales.add(locale) log( logging.DEBUG, "msix", {"locale": locale, "dest": dest}, "Distributing locale '{locale}' from {dest}", ) dest = mozpath.normsep( mozpath.join("VFS", "ProgramFiles", instdir, "distribution", dest) ) if copier.contains(dest): log( logging.INFO, "msix", {"dest": dest, "path": mozpath.join(finder.base, p)}, "Skipping duplicate: {dest} from {path}", ) continue log( logging.DEBUG, "msix", {"dest": dest, "path": mozpath.join(finder.base, p)}, "Adding distribution path: {dest} from {path}", ) copier.add( dest, f, ) locales.remove("en-US") # Windows MSIX packages support a finite set of locales: see # https://docs.microsoft.com/en-us/windows/uwp/publish/supported-languages, which is encoded in # https://searchfox.org/mozilla-central/source/browser/installer/windows/msix/msix-all-locales. # We distribute all of the langpacks supported by the release channel in our MSIX, which is # encoded in https://searchfox.org/mozilla-central/source/browser/locales/all-locales. But we # only advertise support in the App manifest for the intersection of that set and the set of # supported locales. # # We distribute all langpacks to avoid the following issue. Suppose a user manually installs a # langpack that is not supported by Windows, and then updates the installed MSIX package. MSIX # package upgrades are essentially paveover installs, so there is no opportunity for Firefox to # update the langpack before the update. But, since all langpacks are bundled with the MSIX, # that langpack will be up-to-date, preventing one class of YSOD. unadvertised = set() if locale_allowlist: unadvertised = locales - locale_allowlist locales = locales & locale_allowlist for locale in sorted(unadvertised): log( logging.INFO, "msix", {"locale": locale}, "Not advertising distributed locale '{locale}' that is not recognized by Windows", ) locales = ["en-US"] + list(sorted(locales)) resource_language_list = "\n".join( f' <Resource Language="{locale}" />' for locale in sorted(locales) ) defines = { "APPX_ARCH": _MSIX_ARCH[arch], "APPX_DISPLAYNAME": brandFullName, "APPX_DESCRIPTION": brandFullName, # Like 'Mozilla.MozillaFirefox', 'Mozilla.MozillaFirefoxBeta', or # 'Mozilla.MozillaFirefoxNightly'. "APPX_IDENTITY": identity, # Like 'Firefox Package Root', 'Firefox Nightly Package Root', 'Firefox # Beta Package Root'. See above. "APPX_INSTDIR": instdir, # Like 'Firefox%20Package%20Root'. 
"APPX_INSTDIR_QUOTED": urllib.parse.quote(instdir), "APPX_PUBLISHER": publisher, "APPX_PUBLISHER_DISPLAY_NAME": publisher_display_name, "APPX_RESOURCE_LANGUAGE_LIST": resource_language_list, "APPX_VERSION": version, "MOZ_APP_DISPLAYNAME": displayname, "MOZ_APP_NAME": app_name, "MOZ_IGECKOBACKCHANNEL_IID": MOZ_IGECKOBACKCHANNEL_IID, } m.add_preprocess( mozpath.join(template, "AppxManifest.xml.in"), "AppxManifest.xml", [], defines=defines, marker="<!-- #", # So that we can have well-formed XML. ) m.populate_registry(copier) output_dir = mozpath.abspath(output_dir) ensureParentDir(output_dir) start = time.time() result = copier.copy( output_dir, remove_empty_directories=True, skip_if_older=not force ) if log: log_copy_result(log, time.time() - start, output_dir, result) if verbose: # Dump AppxManifest.xml contents for ease of debugging. log(logging.DEBUG, "msix", {}, "AppxManifest.xml") log(logging.DEBUG, "msix", {}, ">>>") for line in open(mozpath.join(output_dir, "AppxManifest.xml")).readlines(): log(logging.DEBUG, "msix", {}, line[:-1]) # Drop trailing line terminator. log(logging.DEBUG, "msix", {}, "<<<") if not makeappx: makeappx = find_sdk_tool("makeappx.exe", log=log) if not makeappx: raise ValueError( "makeappx is required; " "set MAKEAPPX or WINDOWSSDKDIR or PATH" ) # `makeappx.exe` supports both slash and hyphen style arguments; `makemsix` # supports only hyphen style. `makeappx.exe` allows to overwrite and to # provide more feedback, so we prefer invoking with these flags. This will # also accommodate `wine makeappx.exe`. stdout = subprocess.run( [makeappx], check=False, capture_output=True, universal_newlines=True ).stdout is_makeappx = "MakeAppx Tool" in stdout if is_makeappx: args = [makeappx, "pack", "/d", output_dir, "/p", output, "/overwrite"] else: args = [makeappx, "pack", "-d", output_dir, "-p", output] if verbose and is_makeappx: args.append("/verbose") joined = " ".join(shlex_quote(arg) for arg in args) log(logging.INFO, "msix", {"args": args, "joined": joined}, "Invoking: {joined}") sys.stdout.flush() # Otherwise the subprocess output can be interleaved. if verbose: subprocess.check_call(args, universal_newlines=True) else: # Suppress output unless we fail. try: subprocess.check_output(args, universal_newlines=True) except subprocess.CalledProcessError as e: sys.stderr.write(e.output) raise return output
def sign_msix(output, force=False, log=None, verbose=False): """Sign an MSIX with a locally generated self-signed certificate.""" # TODO: sign on non-Windows hosts. if sys.platform != "win32": raise Exception("sign msix only works on Windows") powershell_exe = find_sdk_tool("powershell.exe", log=log) if not powershell_exe: raise ValueError("powershell is required; " "set POWERSHELL or PATH") def powershell(argstring, check=True): "Invoke `powershell.exe`. Arguments are given as a string to allow consumer to quote." args = [powershell_exe, "-c", argstring] joined = " ".join(shlex_quote(arg) for arg in args) log( logging.INFO, "msix", {"args": args, "joined": joined}, "Invoking: {joined}" ) return subprocess.run( args, check=check, universal_newlines=True, capture_output=True ).stdout signtool = find_sdk_tool("signtool.exe", log=log) if not signtool: raise ValueError( "signtool is required; " "set SIGNTOOL or WINDOWSSDKDIR or PATH" ) # Our first order of business is to find, or generate, a (self-signed) # certificate. # These are baked into enough places under `browser/` that we need not # extract constants. vendor = "Mozilla" publisher = "CN=Mozilla Corporation, OU=MSIX Packaging" friendly_name = "Mozilla Corporation MSIX Packaging Test Certificate" # The convention is $MOZBUILD_STATE_PATH/cache/$FEATURE. crt_path = mozpath.join( get_state_dir(), "cache", "mach-msix", "{}.crt".format(friendly_name).replace(" ", "_").lower(), ) crt_path = mozpath.abspath(crt_path) ensureParentDir(crt_path) pfx_path = crt_path.replace(".crt", ".pfx") # TODO: maybe use an actual password. For now, just something that won't be # brute-forced. password = "******" if force or not os.path.isfile(crt_path): log( logging.INFO, "msix", {"crt_path": crt_path}, "Creating new self signed certificate at: {}".format(crt_path), ) thumbprints = [ thumbprint.strip() for thumbprint in powershell( ( "Get-ChildItem -Path Cert:\CurrentUser\My" '| Where-Object {{$_.Subject -Match "{}"}}' '| Where-Object {{$_.FriendlyName -Match "{}"}}' "| Select-Object -ExpandProperty Thumbprint" ).format(vendor, friendly_name) ).splitlines() ] if len(thumbprints) > 1: raise Exception( "Multiple certificates with friendly name found: {}".format( friendly_name ) ) if len(thumbprints) == 1: thumbprint = thumbprints[0] else: thumbprint = None if not thumbprint: thumbprint = ( powershell( ( 'New-SelfSignedCertificate -Type Custom -Subject "{}" ' '-KeyUsage DigitalSignature -FriendlyName "{}"' " -CertStoreLocation Cert:\CurrentUser\My" ' -TextExtension @("2.5.29.37={{text}}1.3.6.1.5.5.7.3.3", ' '"2.5.29.19={{text}}")' "| Select-Object -ExpandProperty Thumbprint" ).format(publisher, friendly_name) ) .strip() .upper() ) if not thumbprint: raise Exception( "Failed to find or create certificate with friendly name: {}".format( friendly_name ) ) powershell( 'Export-Certificate -Cert Cert:\CurrentUser\My\{} -FilePath "{}"'.format( thumbprint, crt_path ) ) log( logging.INFO, "msix", {"crt_path": crt_path}, "Exported public certificate: {crt_path}", ) powershell( ( 'Export-PfxCertificate -Cert Cert:\CurrentUser\My\{} -FilePath "{}"' ' -Password (ConvertTo-SecureString -String "{}" -Force -AsPlainText)' ).format(thumbprint, pfx_path, password) ) log( logging.INFO, "msix", {"pfx_path": pfx_path}, "Exported private certificate: {pfx_path}", ) # Second, to find the right thumbprint to use. We do this here in case # we're coming back to an existing certificate. 
log( logging.INFO, "msix", {"crt_path": crt_path}, "Signing with existing self signed certificate: {crt_path}", ) thumbprints = [ thumbprint.strip() for thumbprint in powershell( 'Get-PfxCertificate -FilePath "{}" | Select-Object -ExpandProperty Thumbprint'.format( crt_path ) ).splitlines() ] if len(thumbprints) > 1: raise Exception("Multiple thumbprints found for PFX: {}".format(pfx_path)) if len(thumbprints) == 0: raise Exception("No thumbprints found for PFX: {}".format(pfx_path)) thumbprint = thumbprints[0] log( logging.INFO, "msix", {"thumbprint": thumbprint}, "Signing with certificate with thumbprint: {thumbprint}", ) # Third, do the actual signing. args = [ signtool, "sign", "/a", "/fd", "SHA256", "/f", pfx_path, "/p", password, output, ] if not verbose: subprocess.check_call(args, universal_newlines=True) else: # Suppress output unless we fail. try: subprocess.check_output(args, universal_newlines=True) except subprocess.CalledProcessError as e: sys.stderr.write(e.output) raise # As a convenience to the user, tell how to use this certificate if it's not # already trusted, and how to work with MSIX files more generally. if verbose: root_thumbprints = [ root_thumbprint.strip() for root_thumbprint in powershell( "Get-ChildItem -Path Cert:\LocalMachine\Root\{} " "| Select-Object -ExpandProperty Thumbprint".format(thumbprint), check=False, ).splitlines() ] if thumbprint not in root_thumbprints: log( logging.INFO, "msix", {"thumbprint": thumbprint}, "Certificate with thumbprint not found in trusted roots: {thumbprint}", ) log( logging.INFO, "msix", {"crt_path": crt_path, "output": output}, r"""\ # Usage To trust this certificate (requires an elevated shell): powershell -c 'Import-Certificate -FilePath "{crt_path}" -Cert Cert:\LocalMachine\Root\' To verify this MSIX signature exists and is trusted: powershell -c 'Get-AuthenticodeSignature -FilePath "{output}" | Format-List *' To install this MSIX: powershell -c 'Add-AppPackage -path "{output}"' To see details after installing: powershell -c 'Get-AppPackage -name Mozilla.MozillaFirefox(Beta,...)' """.strip(), ) return 0
def android_geckoview_docs(self, archive, upload, upload_branch, upload_message, variant): def capitalize(s): # Can't use str.capitalize because it lower cases trailing letters. return (s[0].upper() + s[1:]) if s else '' task = 'geckoview:javadoc' + ('Jar' if archive or upload else '') + capitalize(variant) ret = self.gradle([task], verbose=True) if ret or not upload: return ret # Upload to Github. fmt = { 'level': os.environ.get('MOZ_SCM_LEVEL', '0'), 'project': os.environ.get('MH_BRANCH', 'unknown'), 'revision': os.environ.get('GECKO_HEAD_REV', 'tip'), } env = {} # In order to push to GitHub from TaskCluster, we store a private key # in the TaskCluster secrets store in the format {"content": "<KEY>"}, # and the corresponding public key as a writable deploy key for the # destination repo on GitHub. secret = os.environ.get('GECKOVIEW_DOCS_UPLOAD_SECRET', '').format(**fmt) if secret: # Set up a private key from the secrets store if applicable. import requests req = requests.get('http://taskcluster/secrets/v1/secret/' + secret) req.raise_for_status() keyfile = mozpath.abspath('gv-docs-upload-key') with open(keyfile, 'w') as f: os.chmod(keyfile, 0o600) f.write(req.json()['secret']['content']) # Turn off strict host key checking so ssh does not complain about # unknown github.com host. We're not pushing anything sensitive, so # it's okay to not check GitHub's host keys. env['GIT_SSH_COMMAND'] = 'ssh -i "%s" -o StrictHostKeyChecking=no' % keyfile # Clone remote repo. branch, _, branch_path = upload_branch.partition('/') repo_url = '[email protected]:%s.git' % upload repo_path = mozpath.abspath('gv-docs-repo') self.run_process([ 'git', 'clone', '--branch', branch, '--depth', '1', repo_url, repo_path ], append_env=env, pass_thru=True) env['GIT_DIR'] = mozpath.join(repo_path, '.git') env['GIT_WORK_TREE'] = repo_path env['GIT_AUTHOR_NAME'] = env[ 'GIT_COMMITTER_NAME'] = 'GeckoView Docs Bot' env['GIT_AUTHOR_EMAIL'] = env[ 'GIT_COMMITTER_EMAIL'] = '*****@*****.**' # Extract new javadoc to specified directory inside repo. import mozfile src_tar = mozpath.join(self.topobjdir, 'gradle', 'build', 'mobile', 'android', 'geckoview', 'libs', 'geckoview-javadoc.jar') dst_path = mozpath.join(repo_path, branch_path.format(**fmt)) mozfile.remove(dst_path) mozfile.extract_zip(src_tar, dst_path) # Commit and push. self.run_process(['git', 'add', '--all'], append_env=env, pass_thru=True) if self.run_process(['git', 'diff', '--cached', '--quiet'], append_env=env, pass_thru=True, ensure_exit_code=False) != 0: # We have something to commit. self.run_process( ['git', 'commit', '--message', upload_message.format(**fmt)], append_env=env, pass_thru=True) self.run_process(['git', 'push', 'origin', 'gh-pages'], append_env=env, pass_thru=True) mozfile.remove(repo_path) if secret: mozfile.remove(keyfile) return 0
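# A minimal sketch of the environment-driven git pattern above (paths are placeholders):
# GIT_DIR and GIT_WORK_TREE let later git invocations operate on the clone without
# changing directory, and GIT_SSH_COMMAND pins the deploy key while skipping host-key
# checking.
import os
import subprocess

repo_path = '/tmp/gv-docs-repo'       # placeholder clone location
keyfile = '/tmp/gv-docs-upload-key'   # placeholder deploy key

env = dict(os.environ)
env['GIT_SSH_COMMAND'] = 'ssh -i "%s" -o StrictHostKeyChecking=no' % keyfile
env['GIT_DIR'] = os.path.join(repo_path, '.git')
env['GIT_WORK_TREE'] = repo_path
env['GIT_AUTHOR_NAME'] = env['GIT_COMMITTER_NAME'] = 'GeckoView Docs Bot'

# `git diff --cached --quiet` exits non-zero when something is staged, which is how the
# code above decides whether a commit is needed.
if os.path.isdir(env['GIT_DIR']):
    subprocess.call(['git', 'add', '--all'], env=env)
    if subprocess.call(['git', 'diff', '--cached', '--quiet'], env=env) != 0:
        subprocess.call(['git', 'commit', '--message', 'Update docs'], env=env)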
class TestChecksConfigure(unittest.TestCase): def test_checking(self): def make_test(to_exec): def test(val, msg): out = StringIO() sandbox = ConfigureSandbox({}, stdout=out, stderr=out) base_dir = os.path.join(topsrcdir, "build", "moz.configure") sandbox.include_file(os.path.join(base_dir, "checks.configure")) exec_(to_exec, sandbox) sandbox["foo"](val) self.assertEqual(out.getvalue(), msg) return test test = make_test( textwrap.dedent(""" @checking('for a thing') def foo(value): return value """)) test(True, "checking for a thing... yes\n") test(False, "checking for a thing... no\n") test(42, "checking for a thing... 42\n") test("foo", "checking for a thing... foo\n") data = ["foo", "bar"] test(data, "checking for a thing... %r\n" % data) # When the function given to checking does nothing interesting, the # behavior is not altered test = make_test( textwrap.dedent(""" @checking('for a thing', lambda x: x) def foo(value): return value """)) test(True, "checking for a thing... yes\n") test(False, "checking for a thing... no\n") test(42, "checking for a thing... 42\n") test("foo", "checking for a thing... foo\n") data = ["foo", "bar"] test(data, "checking for a thing... %r\n" % data) test = make_test( textwrap.dedent(""" def munge(x): if not x: return 'not found' if isinstance(x, (str, bool, int)): return x return ' '.join(x) @checking('for a thing', munge) def foo(value): return value """)) test(True, "checking for a thing... yes\n") test(False, "checking for a thing... not found\n") test(42, "checking for a thing... 42\n") test("foo", "checking for a thing... foo\n") data = ["foo", "bar"] test(data, "checking for a thing... foo bar\n") KNOWN_A = ensure_exe_extension(mozpath.abspath("/usr/bin/known-a")) KNOWN_B = ensure_exe_extension(mozpath.abspath("/usr/local/bin/known-b")) KNOWN_C = ensure_exe_extension(mozpath.abspath("/home/user/bin/known c")) OTHER_A = ensure_exe_extension(mozpath.abspath("/lib/other/known-a")) def get_result( self, command="", args=[], environ={}, prog="/bin/configure", extra_paths=None, includes=("util.configure", "checks.configure"), ): config = {} out = StringIO() paths = { self.KNOWN_A: None, self.KNOWN_B: None, self.KNOWN_C: None, } if extra_paths: paths.update(extra_paths) environ = dict(environ) if "PATH" not in environ: environ["PATH"] = os.pathsep.join( os.path.dirname(p) for p in paths) paths[self.OTHER_A] = None sandbox = ConfigureTestSandbox(paths, config, environ, [prog] + args, out, out) base_dir = os.path.join(topsrcdir, "build", "moz.configure") for f in includes: sandbox.include_file(os.path.join(base_dir, f)) status = 0 try: exec_(command, sandbox) sandbox.run() except SystemExit as e: status = e.code return config, out.getvalue(), status def test_check_prog(self): config, out, status = self.get_result( 'check_prog("FOO", ("known-a",))') self.assertEqual(status, 0) self.assertEqual(config, {"FOO": self.KNOWN_A}) self.assertEqual(out, "checking for foo... %s\n" % self.KNOWN_A) config, out, status = self.get_result( 'check_prog("FOO", ("unknown", "known-b", "known c"))') self.assertEqual(status, 0) self.assertEqual(config, {"FOO": self.KNOWN_B}) self.assertEqual(out, "checking for foo... %s\n" % self.KNOWN_B) config, out, status = self.get_result( 'check_prog("FOO", ("unknown", "unknown-2", "known c"))') self.assertEqual(status, 0) self.assertEqual(config, {"FOO": fake_short_path(self.KNOWN_C)}) self.assertEqual( out, "checking for foo... 
%s\n" % shell_quote(fake_short_path(self.KNOWN_C))) config, out, status = self.get_result( 'check_prog("FOO", ("unknown",))') self.assertEqual(status, 1) self.assertEqual(config, {}) self.assertEqual( out, textwrap.dedent("""\ checking for foo... not found DEBUG: foo: Looking for unknown ERROR: Cannot find foo """), ) config, out, status = self.get_result( 'check_prog("FOO", ("unknown", "unknown-2", "unknown 3"))') self.assertEqual(status, 1) self.assertEqual(config, {}) self.assertEqual( out, textwrap.dedent("""\ checking for foo... not found DEBUG: foo: Looking for unknown DEBUG: foo: Looking for unknown-2 DEBUG: foo: Looking for 'unknown 3' ERROR: Cannot find foo """), ) config, out, status = self.get_result( 'check_prog("FOO", ("unknown", "unknown-2", "unknown 3"), ' "allow_missing=True)") self.assertEqual(status, 0) self.assertEqual(config, {}) self.assertEqual(out, "checking for foo... not found\n") @unittest.skipIf(not sys.platform.startswith("win"), "Windows-only test") def test_check_prog_exe(self): config, out, status = self.get_result( 'check_prog("FOO", ("unknown", "known-b", "known c"))', ["FOO=known-a.exe"]) self.assertEqual(status, 0) self.assertEqual(config, {"FOO": self.KNOWN_A}) self.assertEqual(out, "checking for foo... %s\n" % self.KNOWN_A) config, out, status = self.get_result( 'check_prog("FOO", ("unknown", "known-b", "known c"))', ["FOO=%s" % os.path.splitext(self.KNOWN_A)[0]], ) self.assertEqual(status, 0) self.assertEqual(config, {"FOO": self.KNOWN_A}) self.assertEqual(out, "checking for foo... %s\n" % self.KNOWN_A) def test_check_prog_with_args(self): config, out, status = self.get_result( 'check_prog("FOO", ("unknown", "known-b", "known c"))', ["FOO=known-a"]) self.assertEqual(status, 0) self.assertEqual(config, {"FOO": self.KNOWN_A}) self.assertEqual(out, "checking for foo... %s\n" % self.KNOWN_A) config, out, status = self.get_result( 'check_prog("FOO", ("unknown", "known-b", "known c"))', ["FOO=%s" % self.KNOWN_A], ) self.assertEqual(status, 0) self.assertEqual(config, {"FOO": self.KNOWN_A}) self.assertEqual(out, "checking for foo... %s\n" % self.KNOWN_A) path = self.KNOWN_B.replace("known-b", "known-a") config, out, status = self.get_result( 'check_prog("FOO", ("unknown", "known-b", "known c"))', ["FOO=%s" % path]) self.assertEqual(status, 1) self.assertEqual(config, {}) self.assertEqual( out, textwrap.dedent("""\ checking for foo... not found DEBUG: foo: Looking for %s ERROR: Cannot find foo """) % path, ) config, out, status = self.get_result( 'check_prog("FOO", ("unknown",))', ["FOO=known c"]) self.assertEqual(status, 0) self.assertEqual(config, {"FOO": fake_short_path(self.KNOWN_C)}) self.assertEqual( out, "checking for foo... %s\n" % shell_quote(fake_short_path(self.KNOWN_C))) config, out, status = self.get_result( 'check_prog("FOO", ("unknown", "unknown-2", "unknown 3"), ' "allow_missing=True)", ["FOO=unknown"], ) self.assertEqual(status, 1) self.assertEqual(config, {}) self.assertEqual( out, textwrap.dedent("""\ checking for foo... not found DEBUG: foo: Looking for unknown ERROR: Cannot find foo """), ) def test_check_prog_what(self): config, out, status = self.get_result( 'check_prog("CC", ("known-a",), what="the target C compiler")') self.assertEqual(status, 0) self.assertEqual(config, {"CC": self.KNOWN_A}) self.assertEqual( out, "checking for the target C compiler... 
%s\n" % self.KNOWN_A) config, out, status = self.get_result( 'check_prog("CC", ("unknown", "unknown-2", "unknown 3"),' ' what="the target C compiler")') self.assertEqual(status, 1) self.assertEqual(config, {}) self.assertEqual( out, textwrap.dedent("""\ checking for the target C compiler... not found DEBUG: cc: Looking for unknown DEBUG: cc: Looking for unknown-2 DEBUG: cc: Looking for 'unknown 3' ERROR: Cannot find the target C compiler """), ) def test_check_prog_input(self): config, out, status = self.get_result( textwrap.dedent(""" option("--with-ccache", nargs=1, help="ccache") check_prog("CCACHE", ("known-a",), input="--with-ccache") """), ["--with-ccache=known-b"], ) self.assertEqual(status, 0) self.assertEqual(config, {"CCACHE": self.KNOWN_B}) self.assertEqual(out, "checking for ccache... %s\n" % self.KNOWN_B) script = textwrap.dedent(""" option(env="CC", nargs=1, help="compiler") @depends("CC") def compiler(value): return value[0].split()[0] if value else None check_prog("CC", ("known-a",), input=compiler) """) config, out, status = self.get_result(script) self.assertEqual(status, 0) self.assertEqual(config, {"CC": self.KNOWN_A}) self.assertEqual(out, "checking for cc... %s\n" % self.KNOWN_A) config, out, status = self.get_result(script, ["CC=known-b"]) self.assertEqual(status, 0) self.assertEqual(config, {"CC": self.KNOWN_B}) self.assertEqual(out, "checking for cc... %s\n" % self.KNOWN_B) config, out, status = self.get_result(script, ["CC=known-b -m32"]) self.assertEqual(status, 0) self.assertEqual(config, {"CC": self.KNOWN_B}) self.assertEqual(out, "checking for cc... %s\n" % self.KNOWN_B) def test_check_prog_progs(self): config, out, status = self.get_result('check_prog("FOO", ())') self.assertEqual(status, 0) self.assertEqual(config, {}) self.assertEqual(out, "") config, out, status = self.get_result('check_prog("FOO", ())', ["FOO=known-a"]) self.assertEqual(status, 0) self.assertEqual(config, {"FOO": self.KNOWN_A}) self.assertEqual(out, "checking for foo... %s\n" % self.KNOWN_A) script = textwrap.dedent(""" option(env="TARGET", nargs=1, default="linux", help="target") @depends("TARGET") def compiler(value): if value: if value[0] == "linux": return ("gcc", "clang") if value[0] == "winnt": return ("cl", "clang-cl") check_prog("CC", compiler) """) config, out, status = self.get_result(script) self.assertEqual(status, 1) self.assertEqual(config, {}) self.assertEqual( out, textwrap.dedent("""\ checking for cc... not found DEBUG: cc: Looking for gcc DEBUG: cc: Looking for clang ERROR: Cannot find cc """), ) config, out, status = self.get_result(script, ["TARGET=linux"]) self.assertEqual(status, 1) self.assertEqual(config, {}) self.assertEqual( out, textwrap.dedent("""\ checking for cc... not found DEBUG: cc: Looking for gcc DEBUG: cc: Looking for clang ERROR: Cannot find cc """), ) config, out, status = self.get_result(script, ["TARGET=winnt"]) self.assertEqual(status, 1) self.assertEqual(config, {}) self.assertEqual( out, textwrap.dedent("""\ checking for cc... not found DEBUG: cc: Looking for cl DEBUG: cc: Looking for clang-cl ERROR: Cannot find cc """), ) config, out, status = self.get_result(script, ["TARGET=none"]) self.assertEqual(status, 0) self.assertEqual(config, {}) self.assertEqual(out, "") config, out, status = self.get_result(script, ["TARGET=winnt", "CC=known-a"]) self.assertEqual(status, 0) self.assertEqual(config, {"CC": self.KNOWN_A}) self.assertEqual(out, "checking for cc... 
%s\n" % self.KNOWN_A) config, out, status = self.get_result(script, ["TARGET=none", "CC=known-a"]) self.assertEqual(status, 0) self.assertEqual(config, {"CC": self.KNOWN_A}) self.assertEqual(out, "checking for cc... %s\n" % self.KNOWN_A) def test_check_prog_configure_error(self): with self.assertRaises(ConfigureError) as e: self.get_result('check_prog("FOO", "foo")') self.assertEqual(str(e.exception), "progs must resolve to a list or tuple!") with self.assertRaises(ConfigureError) as e: self.get_result('foo = depends(when=True)(lambda: ("a", "b"))\n' 'check_prog("FOO", ("known-a",), input=foo)') self.assertEqual( str(e.exception), "input must resolve to a tuple or a list with a " "single element, or a string", ) with self.assertRaises(ConfigureError) as e: self.get_result('foo = depends(when=True)(lambda: {"a": "b"})\n' 'check_prog("FOO", ("known-a",), input=foo)') self.assertEqual( str(e.exception), "input must resolve to a tuple or a list with a " "single element, or a string", ) def test_check_prog_with_path(self): config, out, status = self.get_result( 'check_prog("A", ("known-a",), paths=["/some/path"])') self.assertEqual(status, 1) self.assertEqual(config, {}) self.assertEqual( out, textwrap.dedent("""\ checking for a... not found DEBUG: a: Looking for known-a ERROR: Cannot find a """), ) config, out, status = self.get_result( 'check_prog("A", ("known-a",), paths=["%s"])' % os.path.dirname(self.OTHER_A)) self.assertEqual(status, 0) self.assertEqual(config, {"A": self.OTHER_A}) self.assertEqual( out, textwrap.dedent("""\ checking for a... %s """ % self.OTHER_A), ) dirs = map(mozpath.dirname, (self.OTHER_A, self.KNOWN_A)) config, out, status = self.get_result( textwrap.dedent("""\ check_prog("A", ("known-a",), paths=["%s"]) """ % os.pathsep.join(dirs))) self.assertEqual(status, 0) self.assertEqual(config, {"A": self.OTHER_A}) self.assertEqual( out, textwrap.dedent("""\ checking for a... %s """ % self.OTHER_A), ) dirs = map(mozpath.dirname, (self.KNOWN_A, self.KNOWN_B)) config, out, status = self.get_result( textwrap.dedent("""\ check_prog("A", ("known-a",), paths=["%s", "%s"]) """ % (os.pathsep.join(dirs), self.OTHER_A))) self.assertEqual(status, 0) self.assertEqual(config, {"A": self.KNOWN_A}) self.assertEqual( out, textwrap.dedent("""\ checking for a... %s """ % self.KNOWN_A), ) config, out, status = self.get_result( 'check_prog("A", ("known-a",), paths="%s")' % os.path.dirname(self.OTHER_A)) self.assertEqual(status, 1) self.assertEqual(config, {}) self.assertEqual( out, textwrap.dedent("""\ checking for a... """ # noqa # trailing whitespace... 
""" DEBUG: a: Looking for known-a ERROR: Paths provided to find_program must be a list of strings, not %r """ % mozpath.dirname(self.OTHER_A)), ) @unittest.skipIf( not sys.platform.startswith("linux"), "Linux-only test, assumes Java is located from a $PATH", ) def test_java_tool_checks_linux(self): def run_configure_java(mock_fs_paths, mock_java_home=None, mock_path=None, args=[]): script = textwrap.dedent("""\ @depends('--help') def host(_): return namespace(os='unknown') include('%(topsrcdir)s/build/moz.configure/java.configure') """ % {"topsrcdir": topsrcdir}) def mock_which(exe, path=None): for mock_fs_path in mock_fs_paths.keys(): (base, filename) = os.path.split(mock_fs_path) if filename == exe: if path and os.path.normpath(base) != os.path.normpath( path): continue return mock_fs_path # Don't let system JAVA_HOME influence the test original_java_home = os.environ.pop("JAVA_HOME", None) configure_environ = {} if mock_java_home: os.environ["JAVA_HOME"] = mock_java_home configure_environ["JAVA_HOME"] = mock_java_home if mock_path: configure_environ["PATH"] = mock_path # * Don't attempt to invoke Java, just resolve each mock Java's version as "1.8" # * Even if the real file sysphabtem has a symlink at the mocked path, don't let # realpath follow it, as it may influence the test. # * When finding a binary, check the mock paths rather than the real filesystem. # Note: Python doesn't allow the different "with" bits to be put in parenthesis, # because then it thinks it's an un-with-able tuple. Additionally, if this is cleanly # lined up with "\", black removes them and autoformats them to the block that is # below. with patch("mozboot.util._resolve_java_version", return_value="1.8"), patch( "os.path.realpath", side_effect=lambda path: path), patch( "mozboot.util.which", side_effect=mock_which): result = self.get_result( args=args, command=script, extra_paths=paths, environ=configure_environ, ) if original_java_home: os.environ["JAVA_HOME"] = original_java_home return result java = mozpath.abspath("/usr/bin/java") javac = mozpath.abspath("/usr/bin/javac") paths = {java: None, javac: None} config, out, status = run_configure_java(paths) self.assertEqual(status, 0) self.assertEqual( config, { "JAVA": java, "MOZ_JAVA_CODE_COVERAGE": False, }, ) self.assertEqual( out, textwrap.dedent("""\ checking for java... %s """ % java), ) # An alternative valid set of tools referred to by JAVA_HOME. alt_java = mozpath.abspath("/usr/local/bin/java") alt_javac = mozpath.abspath("/usr/local/bin/javac") alt_java_home = mozpath.dirname(mozpath.dirname(alt_java)) paths = {alt_java: None, alt_javac: None, java: None, javac: None} alt_path = mozpath.dirname(java) config, out, status = run_configure_java(paths, alt_java_home, alt_path) self.assertEqual(status, 0) self.assertEqual( config, { "JAVA": alt_java, "MOZ_JAVA_CODE_COVERAGE": False, }, ) self.assertEqual( out, textwrap.dedent("""\ checking for java... %s """ % alt_java), ) # We can use --with-java-bin-path instead of JAVA_HOME to similar # effect. config, out, status = run_configure_java( paths, mock_path=mozpath.dirname(java), args=["--with-java-bin-path=%s" % mozpath.dirname(alt_java)], ) self.assertEqual(status, 0) self.assertEqual( config, { "JAVA": alt_java, "MOZ_JAVA_CODE_COVERAGE": False, }, ) self.assertEqual( out, textwrap.dedent("""\ checking for java... %s """ % alt_java), ) # If --with-java-bin-path and JAVA_HOME are both set, # --with-java-bin-path takes precedence. 
config, out, status = run_configure_java( paths, mock_java_home=mozpath.dirname(mozpath.dirname(java)), mock_path=mozpath.dirname(java), args=["--with-java-bin-path=%s" % mozpath.dirname(alt_java)], ) self.assertEqual(status, 0) self.assertEqual( config, { "JAVA": alt_java, "MOZ_JAVA_CODE_COVERAGE": False, }, ) self.assertEqual( out, textwrap.dedent("""\ checking for java... %s """ % alt_java), ) # --enable-java-coverage should set MOZ_JAVA_CODE_COVERAGE. alt_java_home = mozpath.dirname(mozpath.dirname(java)) config, out, status = run_configure_java( paths, mock_java_home=alt_java_home, mock_path=mozpath.dirname(java), args=["--enable-java-coverage"], ) self.assertEqual(status, 0) self.assertEqual( config, { "JAVA": java, "MOZ_JAVA_CODE_COVERAGE": True, }, ) # Any missing tool is fatal when these checks run. paths = {} config, out, status = run_configure_java( mock_fs_paths={}, mock_path=mozpath.dirname(java), args=["--enable-java-coverage"], ) self.assertEqual(status, 1) self.assertEqual(config, {}) self.assertEqual( out, textwrap.dedent("""\ ERROR: Could not find "java" on the $PATH. Please install the Java 1.8 JDK \ and/or set $JAVA_HOME. """), ) def test_pkg_check_modules(self): mock_pkg_config_version = "0.10.0" mock_pkg_config_path = mozpath.abspath("/usr/bin/pkg-config") def mock_pkg_config(_, args): if args[0:2] == ("--errors-to-stdout", "--print-errors"): assert len(args) == 3 package = args[2] if package == "unknown": return ( 1, "Package unknown was not found in the pkg-config search path.\n" "Perhaps you should add the directory containing `unknown.pc'\n" "to the PKG_CONFIG_PATH environment variable\n" "No package 'unknown' found", "", ) if package == "valid": return 0, "", "" if package == "new > 1.1": return 1, "Requested 'new > 1.1' but version of new is 1.1", "" if args[0] == "--cflags": assert len(args) == 2 return 0, "-I/usr/include/%s" % args[1], "" if args[0] == "--libs": assert len(args) == 2 return 0, "-l%s" % args[1], "" if args[0] == "--version": return 0, mock_pkg_config_version, "" self.fail("Unexpected arguments to mock_pkg_config: %s" % (args, )) def get_result(cmd, args=[], extra_paths=None): return self.get_result( textwrap.dedent("""\ option('--disable-compile-environment', help='compile env') compile_environment = depends(when='--enable-compile-environment')(lambda: True) toolchain_prefix = depends(when=True)(lambda: None) include('%(topsrcdir)s/build/moz.configure/util.configure') include('%(topsrcdir)s/build/moz.configure/checks.configure') include('%(topsrcdir)s/build/moz.configure/pkg.configure') """ % {"topsrcdir": topsrcdir}) + cmd, args=args, extra_paths=extra_paths, includes=(), ) extra_paths = { mock_pkg_config_path: mock_pkg_config, } config, output, status = get_result( "pkg_check_modules('MOZ_VALID', 'valid')") self.assertEqual(status, 1) self.assertEqual( output, textwrap.dedent("""\ checking for pkg_config... not found ERROR: *** The pkg-config script could not be found. Make sure it is *** in your path, or set the PKG_CONFIG environment variable *** to the full path to pkg-config. """), ) config, output, status = get_result( "pkg_check_modules('MOZ_VALID', 'valid')", extra_paths=extra_paths) self.assertEqual(status, 0) self.assertEqual( output, textwrap.dedent("""\ checking for pkg_config... %s checking for pkg-config version... %s checking for valid... yes checking MOZ_VALID_CFLAGS... -I/usr/include/valid checking MOZ_VALID_LIBS... 
-lvalid """ % (mock_pkg_config_path, mock_pkg_config_version)), ) self.assertEqual( config, { "PKG_CONFIG": mock_pkg_config_path, "MOZ_VALID_CFLAGS": ("-I/usr/include/valid", ), "MOZ_VALID_LIBS": ("-lvalid", ), }, ) config, output, status = get_result( "pkg_check_modules('MOZ_UKNOWN', 'unknown')", extra_paths=extra_paths) self.assertEqual(status, 1) self.assertEqual( output, textwrap.dedent("""\ checking for pkg_config... %s checking for pkg-config version... %s checking for unknown... no ERROR: Package unknown was not found in the pkg-config search path. ERROR: Perhaps you should add the directory containing `unknown.pc' ERROR: to the PKG_CONFIG_PATH environment variable ERROR: No package 'unknown' found """ % (mock_pkg_config_path, mock_pkg_config_version)), ) self.assertEqual( config, { "PKG_CONFIG": mock_pkg_config_path, }, ) config, output, status = get_result( "pkg_check_modules('MOZ_NEW', 'new > 1.1')", extra_paths=extra_paths) self.assertEqual(status, 1) self.assertEqual( output, textwrap.dedent("""\ checking for pkg_config... %s checking for pkg-config version... %s checking for new > 1.1... no ERROR: Requested 'new > 1.1' but version of new is 1.1 """ % (mock_pkg_config_path, mock_pkg_config_version)), ) self.assertEqual( config, { "PKG_CONFIG": mock_pkg_config_path, }, ) # allow_missing makes missing packages non-fatal. cmd = textwrap.dedent("""\ have_new_module = pkg_check_modules('MOZ_NEW', 'new > 1.1', allow_missing=True) @depends(have_new_module) def log_new_module_error(mod): if mod is not True: log.info('Module not found.') """) config, output, status = get_result(cmd, extra_paths=extra_paths) self.assertEqual(status, 0) self.assertEqual( output, textwrap.dedent("""\ checking for pkg_config... %s checking for pkg-config version... %s checking for new > 1.1... no WARNING: Requested 'new > 1.1' but version of new is 1.1 Module not found. """ % (mock_pkg_config_path, mock_pkg_config_version)), ) self.assertEqual( config, { "PKG_CONFIG": mock_pkg_config_path, }, ) config, output, status = get_result( cmd, args=["--disable-compile-environment"], extra_paths=extra_paths) self.assertEqual(status, 0) self.assertEqual(output, "Module not found.\n") self.assertEqual(config, {}) def mock_old_pkg_config(_, args): if args[0] == "--version": return 0, "0.8.10", "" self.fail("Unexpected arguments to mock_old_pkg_config: %s" % args) extra_paths = { mock_pkg_config_path: mock_old_pkg_config, } config, output, status = get_result( "pkg_check_modules('MOZ_VALID', 'valid')", extra_paths=extra_paths) self.assertEqual(status, 1) self.assertEqual( output, textwrap.dedent("""\ checking for pkg_config... %s checking for pkg-config version... 0.8.10 ERROR: *** Your version of pkg-config is too old. You need version 0.9.0 or newer. """ % mock_pkg_config_path), ) def test_simple_keyfile(self): includes = ("util.configure", "checks.configure", "keyfiles.configure") config, output, status = self.get_result( "simple_keyfile('Mozilla API')", includes=includes) self.assertEqual(status, 0) self.assertEqual( output, textwrap.dedent("""\ checking for the Mozilla API key... no """), ) self.assertEqual( config, { "MOZ_MOZILLA_API_KEY": "no-mozilla-api-key", }, ) config, output, status = self.get_result( "simple_keyfile('Mozilla API')", args=["--with-mozilla-api-keyfile=/foo/bar/does/not/exist"], includes=includes, ) self.assertEqual(status, 1) self.assertEqual( output, textwrap.dedent("""\ checking for the Mozilla API key... no ERROR: '/foo/bar/does/not/exist': No such file or directory. 
"""), ) self.assertEqual(config, {}) with MockedOpen({"key": ""}): config, output, status = self.get_result( "simple_keyfile('Mozilla API')", args=["--with-mozilla-api-keyfile=key"], includes=includes, ) self.assertEqual(status, 1) self.assertEqual( output, textwrap.dedent("""\ checking for the Mozilla API key... no ERROR: 'key' is empty. """), ) self.assertEqual(config, {}) with MockedOpen({"key": "fake-key\n"}): config, output, status = self.get_result( "simple_keyfile('Mozilla API')", args=["--with-mozilla-api-keyfile=key"], includes=includes, ) self.assertEqual(status, 0) self.assertEqual( output, textwrap.dedent("""\ checking for the Mozilla API key... yes """), ) self.assertEqual( config, { "MOZ_MOZILLA_API_KEY": "fake-key", }, ) with MockedOpen({"default": "default-key\n"}): config, output, status = self.get_result( "simple_keyfile('Mozilla API', default='default')", includes=includes) self.assertEqual(status, 0) self.assertEqual( output, textwrap.dedent("""\ checking for the Mozilla API key... yes """), ) self.assertEqual( config, { "MOZ_MOZILLA_API_KEY": "default-key", }, ) with MockedOpen({"default": "default-key\n", "key": "fake-key\n"}): config, output, status = self.get_result( "simple_keyfile('Mozilla API', default='key')", includes=includes) self.assertEqual(status, 0) self.assertEqual( output, textwrap.dedent("""\ checking for the Mozilla API key... yes """), ) self.assertEqual( config, { "MOZ_MOZILLA_API_KEY": "fake-key", }, ) def test_id_and_secret_keyfile(self): includes = ("util.configure", "checks.configure", "keyfiles.configure") config, output, status = self.get_result( "id_and_secret_keyfile('Bing API')", includes=includes) self.assertEqual(status, 0) self.assertEqual( output, textwrap.dedent("""\ checking for the Bing API key... no """), ) self.assertEqual( config, { "MOZ_BING_API_CLIENTID": "no-bing-api-clientid", "MOZ_BING_API_KEY": "no-bing-api-key", }, ) config, output, status = self.get_result( "id_and_secret_keyfile('Bing API')", args=["--with-bing-api-keyfile=/foo/bar/does/not/exist"], includes=includes, ) self.assertEqual(status, 1) self.assertEqual( output, textwrap.dedent("""\ checking for the Bing API key... no ERROR: '/foo/bar/does/not/exist': No such file or directory. """), ) self.assertEqual(config, {}) with MockedOpen({"key": ""}): config, output, status = self.get_result( "id_and_secret_keyfile('Bing API')", args=["--with-bing-api-keyfile=key"], includes=includes, ) self.assertEqual(status, 1) self.assertEqual( output, textwrap.dedent("""\ checking for the Bing API key... no ERROR: 'key' is empty. """), ) self.assertEqual(config, {}) with MockedOpen({"key": "fake-id fake-key\n"}): config, output, status = self.get_result( "id_and_secret_keyfile('Bing API')", args=["--with-bing-api-keyfile=key"], includes=includes, ) self.assertEqual(status, 0) self.assertEqual( output, textwrap.dedent("""\ checking for the Bing API key... yes """), ) self.assertEqual( config, { "MOZ_BING_API_CLIENTID": "fake-id", "MOZ_BING_API_KEY": "fake-key", }, ) with MockedOpen({"key": "fake-key\n"}): config, output, status = self.get_result( "id_and_secret_keyfile('Bing API')", args=["--with-bing-api-keyfile=key"], includes=includes, ) self.assertEqual(status, 1) self.assertEqual( output, textwrap.dedent("""\ checking for the Bing API key... no ERROR: Bing API key file has an invalid format. 
"""), ) self.assertEqual(config, {}) with MockedOpen({"default-key": "default-id default-key\n"}): config, output, status = self.get_result( "id_and_secret_keyfile('Bing API', default='default-key')", includes=includes, ) self.assertEqual(status, 0) self.assertEqual( output, textwrap.dedent("""\ checking for the Bing API key... yes """), ) self.assertEqual( config, { "MOZ_BING_API_CLIENTID": "default-id", "MOZ_BING_API_KEY": "default-key", }, ) with MockedOpen({ "default-key": "default-id default-key\n", "key": "fake-id fake-key\n" }): config, output, status = self.get_result( "id_and_secret_keyfile('Bing API', default='default-key')", args=["--with-bing-api-keyfile=key"], includes=includes, ) self.assertEqual(status, 0) self.assertEqual( output, textwrap.dedent("""\ checking for the Bing API key... yes """), ) self.assertEqual( config, { "MOZ_BING_API_CLIENTID": "fake-id", "MOZ_BING_API_KEY": "fake-key", }, )
def android_geckoview_docs(self, archive, upload, upload_branch, javadoc_path, upload_message): tasks = (self.substs['GRADLE_ANDROID_GECKOVIEW_DOCS_ARCHIVE_TASKS'] if archive or upload else self.substs['GRADLE_ANDROID_GECKOVIEW_DOCS_TASKS']) ret = self.gradle(tasks, verbose=True) if ret or not upload: return ret # Upload to Github. fmt = { 'level': os.environ.get('MOZ_SCM_LEVEL', '0'), 'project': os.environ.get('MH_BRANCH', 'unknown'), 'revision': os.environ.get('GECKO_HEAD_REV', 'tip'), } env = {} # In order to push to GitHub from TaskCluster, we store a private key # in the TaskCluster secrets store in the format {"content": "<KEY>"}, # and the corresponding public key as a writable deploy key for the # destination repo on GitHub. secret = os.environ.get('GECKOVIEW_DOCS_UPLOAD_SECRET', '').format(**fmt) if secret: # Set up a private key from the secrets store if applicable. import requests req = requests.get('http://taskcluster/secrets/v1/secret/' + secret) req.raise_for_status() keyfile = mozpath.abspath('gv-docs-upload-key') with open(keyfile, 'w') as f: os.chmod(keyfile, 0o600) f.write(req.json()['secret']['content']) # Turn off strict host key checking so ssh does not complain about # unknown github.com host. We're not pushing anything sensitive, so # it's okay to not check GitHub's host keys. env['GIT_SSH_COMMAND'] = 'ssh -i "%s" -o StrictHostKeyChecking=no' % keyfile # Clone remote repo. branch = upload_branch.format(**fmt) repo_url = '[email protected]:%s.git' % upload repo_path = mozpath.abspath('gv-docs-repo') self.run_process(['git', 'clone', '--branch', upload_branch, '--depth', '1', repo_url, repo_path], append_env=env, pass_thru=True) env['GIT_DIR'] = mozpath.join(repo_path, '.git') env['GIT_WORK_TREE'] = repo_path env['GIT_AUTHOR_NAME'] = env['GIT_COMMITTER_NAME'] = 'GeckoView Docs Bot' env['GIT_AUTHOR_EMAIL'] = env['GIT_COMMITTER_EMAIL'] = '*****@*****.**' # Copy over user documentation. import mozfile # Remove existing geckoview docs and replace with the local copy. # Keep all the files that are git specific and not part of the GV documentation. keep_files = [".git", ".gitignore", "_site", "CODE_OF_CONDUCT.md", "Gemfile.lock", "README.md"] for filename in os.listdir(repo_path): if filename not in keep_files: filepath = mozpath.join(repo_path, filename) mozfile.remove(filepath) src_path = mozpath.join(self.topsrcdir, 'mobile', 'android', 'docs', 'geckoview') os.system("rsync -aruz {}/ {}/".format(src_path, repo_path)) # Extract new javadoc to specified directory inside repo. src_tar = mozpath.join(self.topobjdir, 'gradle', 'build', 'mobile', 'android', 'geckoview', 'libs', 'geckoview-javadoc.jar') dst_path = mozpath.join(repo_path, javadoc_path.format(**fmt)) mozfile.remove(dst_path) mozfile.extract_zip(src_tar, dst_path) # Commit and push. self.run_process(['git', 'add', '--all'], append_env=env, pass_thru=True) if self.run_process(['git', 'diff', '--cached', '--quiet'], append_env=env, pass_thru=True, ensure_exit_code=False) != 0: # We have something to commit. self.run_process(['git', 'commit', '--message', upload_message.format(**fmt)], append_env=env, pass_thru=True) self.run_process(['git', 'push', 'origin', branch], append_env=env, pass_thru=True) mozfile.remove(repo_path) if secret: mozfile.remove(keyfile) return 0
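# A standalone sketch of the prune-then-sync step above (the directory layout is
# assumed): everything in the checkout that is not git metadata or site scaffolding is
# removed before the fresh docs are copied in, so pages deleted upstream do not linger
# on the published branch.
import os
import shutil

KEEP_FILES = ['.git', '.gitignore', '_site', 'CODE_OF_CONDUCT.md', 'Gemfile.lock', 'README.md']


def prune_checkout(repo_path):
    for filename in os.listdir(repo_path):
        if filename in KEEP_FILES:
            continue
        filepath = os.path.join(repo_path, filename)
        if os.path.isdir(filepath):
            shutil.rmtree(filepath)
        else:
            os.remove(filepath)


# prune_checkout('/tmp/gv-docs-repo')  # then rsync the new docs into place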
import logging import os import sys from collections import defaultdict from mozbuild.base import MozbuildObject from mozlint.pathutils import findobject from mozlint.parser import Parser from mozlint.result import ResultSummary from mozlog.structuredlog import StructuredLogger from mozpack import path import pytest here = path.abspath(path.dirname(__file__)) build = MozbuildObject.from_environment(cwd=here, virtualenv_name="python-test") lintdir = path.dirname(here) sys.path.insert(0, lintdir) logger = logging.getLogger("mozlint") def pytest_generate_tests(metafunc): """Finds, loads and returns the config for the linter name specified by the LINTER global variable in the calling module. This implies that each test file (that uses this fixture) should only be used to test a single linter. If no LINTER variable is defined, the test will fail.
class TestChecksConfigure(unittest.TestCase): def test_checking(self): out = StringIO() sandbox = ConfigureSandbox({}, stdout=out, stderr=out) base_dir = os.path.join(topsrcdir, 'build', 'moz.configure') sandbox.include_file(os.path.join(base_dir, 'checks.configure')) exec_(textwrap.dedent(''' @checking('for a thing') def foo(value): return value '''), sandbox) foo = sandbox['foo'] foo(True) self.assertEqual(out.getvalue(), 'checking for a thing... yes\n') out.truncate(0) foo(False) self.assertEqual(out.getvalue(), 'checking for a thing... no\n') out.truncate(0) foo(42) self.assertEqual(out.getvalue(), 'checking for a thing... 42\n') out.truncate(0) foo('foo') self.assertEqual(out.getvalue(), 'checking for a thing... foo\n') out.truncate(0) data = ['foo', 'bar'] foo(data) self.assertEqual(out.getvalue(), 'checking for a thing... %r\n' % data) # When the function given to checking does nothing interesting, the # behavior is not altered exec_(textwrap.dedent(''' @checking('for a thing', lambda x: x) def foo(value): return value '''), sandbox) foo = sandbox['foo'] out.truncate(0) foo(True) self.assertEqual(out.getvalue(), 'checking for a thing... yes\n') out.truncate(0) foo(False) self.assertEqual(out.getvalue(), 'checking for a thing... no\n') out.truncate(0) foo(42) self.assertEqual(out.getvalue(), 'checking for a thing... 42\n') out.truncate(0) foo('foo') self.assertEqual(out.getvalue(), 'checking for a thing... foo\n') out.truncate(0) data = ['foo', 'bar'] foo(data) self.assertEqual(out.getvalue(), 'checking for a thing... %r\n' % data) exec_(textwrap.dedent(''' def munge(x): if not x: return 'not found' if isinstance(x, (str, bool, int)): return x return ' '.join(x) @checking('for a thing', munge) def foo(value): return value '''), sandbox) foo = sandbox['foo'] out.truncate(0) foo(True) self.assertEqual(out.getvalue(), 'checking for a thing... yes\n') out.truncate(0) foo(False) self.assertEqual(out.getvalue(), 'checking for a thing... not found\n') out.truncate(0) foo(42) self.assertEqual(out.getvalue(), 'checking for a thing... 42\n') out.truncate(0) foo('foo') self.assertEqual(out.getvalue(), 'checking for a thing... foo\n') out.truncate(0) foo(['foo', 'bar']) self.assertEqual(out.getvalue(), 'checking for a thing... foo bar\n') KNOWN_A = ensure_exe_extension(mozpath.abspath('/usr/bin/known-a')) KNOWN_B = ensure_exe_extension(mozpath.abspath('/usr/local/bin/known-b')) KNOWN_C = ensure_exe_extension(mozpath.abspath('/home/user/bin/known c')) OTHER_A = ensure_exe_extension(mozpath.abspath('/lib/other/known-a')) def get_result(self, command='', args=[], environ={}, prog='/bin/configure', extra_paths=None, includes=('util.configure', 'checks.configure')): config = {} out = StringIO() paths = { self.KNOWN_A: None, self.KNOWN_B: None, self.KNOWN_C: None, } if extra_paths: paths.update(extra_paths) environ = dict(environ) if 'PATH' not in environ: environ['PATH'] = os.pathsep.join(os.path.dirname(p) for p in paths) paths[self.OTHER_A] = None sandbox = ConfigureTestSandbox(paths, config, environ, [prog] + args, out, out) base_dir = os.path.join(topsrcdir, 'build', 'moz.configure') for f in includes: sandbox.include_file(os.path.join(base_dir, f)) status = 0 try: exec_(command, sandbox) sandbox.run() except SystemExit as e: status = e.code return config, out.getvalue(), status def test_check_prog(self): config, out, status = self.get_result( 'check_prog("FOO", ("known-a",))') self.assertEqual(status, 0) self.assertEqual(config, {'FOO': self.KNOWN_A}) self.assertEqual(out, 'checking for foo... 
%s\n' % self.KNOWN_A) config, out, status = self.get_result( 'check_prog("FOO", ("unknown", "known-b", "known c"))') self.assertEqual(status, 0) self.assertEqual(config, {'FOO': self.KNOWN_B}) self.assertEqual(out, 'checking for foo... %s\n' % self.KNOWN_B) config, out, status = self.get_result( 'check_prog("FOO", ("unknown", "unknown-2", "known c"))') self.assertEqual(status, 0) self.assertEqual(config, {'FOO': fake_short_path(self.KNOWN_C)}) self.assertEqual(out, "checking for foo... %s\n" % shell_quote(fake_short_path(self.KNOWN_C))) config, out, status = self.get_result( 'check_prog("FOO", ("unknown",))') self.assertEqual(status, 1) self.assertEqual(config, {}) self.assertEqual(out, textwrap.dedent('''\ checking for foo... not found DEBUG: foo: Trying unknown ERROR: Cannot find foo ''')) config, out, status = self.get_result( 'check_prog("FOO", ("unknown", "unknown-2", "unknown 3"))') self.assertEqual(status, 1) self.assertEqual(config, {}) self.assertEqual(out, textwrap.dedent('''\ checking for foo... not found DEBUG: foo: Trying unknown DEBUG: foo: Trying unknown-2 DEBUG: foo: Trying 'unknown 3' ERROR: Cannot find foo ''')) config, out, status = self.get_result( 'check_prog("FOO", ("unknown", "unknown-2", "unknown 3"), ' 'allow_missing=True)') self.assertEqual(status, 0) self.assertEqual(config, {'FOO': ':'}) self.assertEqual(out, 'checking for foo... not found\n') @unittest.skipIf(not sys.platform.startswith('win'), 'Windows-only test') def test_check_prog_exe(self): config, out, status = self.get_result( 'check_prog("FOO", ("unknown", "known-b", "known c"))', ['FOO=known-a.exe']) self.assertEqual(status, 0) self.assertEqual(config, {'FOO': self.KNOWN_A}) self.assertEqual(out, 'checking for foo... %s\n' % self.KNOWN_A) config, out, status = self.get_result( 'check_prog("FOO", ("unknown", "known-b", "known c"))', ['FOO=%s' % os.path.splitext(self.KNOWN_A)[0]]) self.assertEqual(status, 0) self.assertEqual(config, {'FOO': self.KNOWN_A}) self.assertEqual(out, 'checking for foo... %s\n' % self.KNOWN_A) def test_check_prog_with_args(self): config, out, status = self.get_result( 'check_prog("FOO", ("unknown", "known-b", "known c"))', ['FOO=known-a']) self.assertEqual(status, 0) self.assertEqual(config, {'FOO': self.KNOWN_A}) self.assertEqual(out, 'checking for foo... %s\n' % self.KNOWN_A) config, out, status = self.get_result( 'check_prog("FOO", ("unknown", "known-b", "known c"))', ['FOO=%s' % self.KNOWN_A]) self.assertEqual(status, 0) self.assertEqual(config, {'FOO': self.KNOWN_A}) self.assertEqual(out, 'checking for foo... %s\n' % self.KNOWN_A) path = self.KNOWN_B.replace('known-b', 'known-a') config, out, status = self.get_result( 'check_prog("FOO", ("unknown", "known-b", "known c"))', ['FOO=%s' % path]) self.assertEqual(status, 1) self.assertEqual(config, {}) self.assertEqual(out, textwrap.dedent('''\ checking for foo... not found DEBUG: foo: Trying %s ERROR: Cannot find foo ''') % path) config, out, status = self.get_result( 'check_prog("FOO", ("unknown",))', ['FOO=known c']) self.assertEqual(status, 0) self.assertEqual(config, {'FOO': fake_short_path(self.KNOWN_C)}) self.assertEqual(out, "checking for foo... %s\n" % shell_quote(fake_short_path(self.KNOWN_C))) config, out, status = self.get_result( 'check_prog("FOO", ("unknown", "unknown-2", "unknown 3"), ' 'allow_missing=True)', ['FOO=unknown']) self.assertEqual(status, 1) self.assertEqual(config, {}) self.assertEqual(out, textwrap.dedent('''\ checking for foo... 
not found DEBUG: foo: Trying unknown ERROR: Cannot find foo ''')) def test_check_prog_what(self): config, out, status = self.get_result( 'check_prog("CC", ("known-a",), what="the target C compiler")') self.assertEqual(status, 0) self.assertEqual(config, {'CC': self.KNOWN_A}) self.assertEqual( out, 'checking for the target C compiler... %s\n' % self.KNOWN_A) config, out, status = self.get_result( 'check_prog("CC", ("unknown", "unknown-2", "unknown 3"),' ' what="the target C compiler")') self.assertEqual(status, 1) self.assertEqual(config, {}) self.assertEqual(out, textwrap.dedent('''\ checking for the target C compiler... not found DEBUG: cc: Trying unknown DEBUG: cc: Trying unknown-2 DEBUG: cc: Trying 'unknown 3' ERROR: Cannot find the target C compiler ''')) def test_check_prog_input(self): config, out, status = self.get_result(textwrap.dedent(''' option("--with-ccache", nargs=1, help="ccache") check_prog("CCACHE", ("known-a",), input="--with-ccache") '''), ['--with-ccache=known-b']) self.assertEqual(status, 0) self.assertEqual(config, {'CCACHE': self.KNOWN_B}) self.assertEqual(out, 'checking for ccache... %s\n' % self.KNOWN_B) script = textwrap.dedent(''' option(env="CC", nargs=1, help="compiler") @depends("CC") def compiler(value): return value[0].split()[0] if value else None check_prog("CC", ("known-a",), input=compiler) ''') config, out, status = self.get_result(script) self.assertEqual(status, 0) self.assertEqual(config, {'CC': self.KNOWN_A}) self.assertEqual(out, 'checking for cc... %s\n' % self.KNOWN_A) config, out, status = self.get_result(script, ['CC=known-b']) self.assertEqual(status, 0) self.assertEqual(config, {'CC': self.KNOWN_B}) self.assertEqual(out, 'checking for cc... %s\n' % self.KNOWN_B) config, out, status = self.get_result(script, ['CC=known-b -m32']) self.assertEqual(status, 0) self.assertEqual(config, {'CC': self.KNOWN_B}) self.assertEqual(out, 'checking for cc... %s\n' % self.KNOWN_B) def test_check_prog_progs(self): config, out, status = self.get_result( 'check_prog("FOO", ())') self.assertEqual(status, 0) self.assertEqual(config, {}) self.assertEqual(out, '') config, out, status = self.get_result( 'check_prog("FOO", ())', ['FOO=known-a']) self.assertEqual(status, 0) self.assertEqual(config, {'FOO': self.KNOWN_A}) self.assertEqual(out, 'checking for foo... %s\n' % self.KNOWN_A) script = textwrap.dedent(''' option(env="TARGET", nargs=1, default="linux", help="target") @depends("TARGET") def compiler(value): if value: if value[0] == "linux": return ("gcc", "clang") if value[0] == "winnt": return ("cl", "clang-cl") check_prog("CC", compiler) ''') config, out, status = self.get_result(script) self.assertEqual(status, 1) self.assertEqual(config, {}) self.assertEqual(out, textwrap.dedent('''\ checking for cc... not found DEBUG: cc: Trying gcc DEBUG: cc: Trying clang ERROR: Cannot find cc ''')) config, out, status = self.get_result(script, ['TARGET=linux']) self.assertEqual(status, 1) self.assertEqual(config, {}) self.assertEqual(out, textwrap.dedent('''\ checking for cc... not found DEBUG: cc: Trying gcc DEBUG: cc: Trying clang ERROR: Cannot find cc ''')) config, out, status = self.get_result(script, ['TARGET=winnt']) self.assertEqual(status, 1) self.assertEqual(config, {}) self.assertEqual(out, textwrap.dedent('''\ checking for cc... 
not found DEBUG: cc: Trying cl DEBUG: cc: Trying clang-cl ERROR: Cannot find cc ''')) config, out, status = self.get_result(script, ['TARGET=none']) self.assertEqual(status, 0) self.assertEqual(config, {}) self.assertEqual(out, '') config, out, status = self.get_result(script, ['TARGET=winnt', 'CC=known-a']) self.assertEqual(status, 0) self.assertEqual(config, {'CC': self.KNOWN_A}) self.assertEqual(out, 'checking for cc... %s\n' % self.KNOWN_A) config, out, status = self.get_result(script, ['TARGET=none', 'CC=known-a']) self.assertEqual(status, 0) self.assertEqual(config, {'CC': self.KNOWN_A}) self.assertEqual(out, 'checking for cc... %s\n' % self.KNOWN_A) def test_check_prog_configure_error(self): with self.assertRaises(ConfigureError) as e: self.get_result('check_prog("FOO", "foo")') self.assertEqual(e.exception.message, 'progs must resolve to a list or tuple!') with self.assertRaises(ConfigureError) as e: self.get_result( 'foo = depends(when=True)(lambda: ("a", "b"))\n' 'check_prog("FOO", ("known-a",), input=foo)' ) self.assertEqual(e.exception.message, 'input must resolve to a tuple or a list with a ' 'single element, or a string') with self.assertRaises(ConfigureError) as e: self.get_result( 'foo = depends(when=True)(lambda: {"a": "b"})\n' 'check_prog("FOO", ("known-a",), input=foo)' ) self.assertEqual(e.exception.message, 'input must resolve to a tuple or a list with a ' 'single element, or a string') def test_check_prog_with_path(self): config, out, status = self.get_result('check_prog("A", ("known-a",), paths=["/some/path"])') self.assertEqual(status, 1) self.assertEqual(config, {}) self.assertEqual(out, textwrap.dedent('''\ checking for a... not found DEBUG: a: Trying known-a ERROR: Cannot find a ''')) config, out, status = self.get_result('check_prog("A", ("known-a",), paths=["%s"])' % os.path.dirname(self.OTHER_A)) self.assertEqual(status, 0) self.assertEqual(config, {'A': self.OTHER_A}) self.assertEqual(out, textwrap.dedent('''\ checking for a... %s ''' % self.OTHER_A)) dirs = map(mozpath.dirname, (self.OTHER_A, self.KNOWN_A)) config, out, status = self.get_result(textwrap.dedent('''\ check_prog("A", ("known-a",), paths=["%s"]) ''' % os.pathsep.join(dirs))) self.assertEqual(status, 0) self.assertEqual(config, {'A': self.OTHER_A}) self.assertEqual(out, textwrap.dedent('''\ checking for a... %s ''' % self.OTHER_A)) dirs = map(mozpath.dirname, (self.KNOWN_A, self.KNOWN_B)) config, out, status = self.get_result(textwrap.dedent('''\ check_prog("A", ("known-a",), paths=["%s", "%s"]) ''' % (os.pathsep.join(dirs), self.OTHER_A))) self.assertEqual(status, 0) self.assertEqual(config, {'A': self.KNOWN_A}) self.assertEqual(out, textwrap.dedent('''\ checking for a... %s ''' % self.KNOWN_A)) config, out, status = self.get_result('check_prog("A", ("known-a",), paths="%s")' % os.path.dirname(self.OTHER_A)) self.assertEqual(status, 1) self.assertEqual(config, {}) self.assertEqual(out, textwrap.dedent('''\ checking for a... DEBUG: a: Trying known-a ERROR: Paths provided to find_program must be a list of strings, not %r ''' % mozpath.dirname(self.OTHER_A))) def test_java_tool_checks(self): # A valid set of tools in a standard location. 
java = mozpath.abspath('/usr/bin/java') jarsigner = mozpath.abspath('/usr/bin/jarsigner') keytool = mozpath.abspath('/usr/bin/keytool') paths = { java: None, jarsigner: None, keytool: None, } script = textwrap.dedent('''\ @depends('--help') def host(_): return namespace(os='unknown') include('%(topsrcdir)s/build/moz.configure/java.configure') ''' % {'topsrcdir': topsrcdir}) config, out, status = self.get_result(command=script, extra_paths=paths) self.assertEqual(status, 0) self.assertEqual(config, { 'JAVA': java, 'JARSIGNER': jarsigner, 'KEYTOOL': keytool, 'MOZ_JAVA_CODE_COVERAGE': False, }) self.assertEqual(out, textwrap.dedent('''\ checking for java... %s checking for jarsigner... %s checking for keytool... %s ''' % (java, jarsigner, keytool))) # An alternative valid set of tools referred to by JAVA_HOME. alt_java = mozpath.abspath('/usr/local/bin/java') alt_jarsigner = mozpath.abspath('/usr/local/bin/jarsigner') alt_keytool = mozpath.abspath('/usr/local/bin/keytool') alt_java_home = mozpath.dirname(mozpath.dirname(alt_java)) paths.update({ alt_java: None, alt_jarsigner: None, alt_keytool: None, }) config, out, status = self.get_result(command=script, extra_paths=paths, environ={ 'JAVA_HOME': alt_java_home, 'PATH': mozpath.dirname(java) }) self.assertEqual(status, 0) self.assertEqual(config, { 'JAVA': alt_java, 'JARSIGNER': alt_jarsigner, 'KEYTOOL': alt_keytool, 'MOZ_JAVA_CODE_COVERAGE': False, }) self.assertEqual(out, textwrap.dedent('''\ checking for java... %s checking for jarsigner... %s checking for keytool... %s ''' % (alt_java, alt_jarsigner, alt_keytool))) # We can use --with-java-bin-path instead of JAVA_HOME to similar # effect. config, out, status = self.get_result( command=script, args=['--with-java-bin-path=%s' % mozpath.dirname(alt_java)], extra_paths=paths, environ={ 'PATH': mozpath.dirname(java) }) self.assertEqual(status, 0) self.assertEqual(config, { 'JAVA': alt_java, 'JARSIGNER': alt_jarsigner, 'KEYTOOL': alt_keytool, 'MOZ_JAVA_CODE_COVERAGE': False, }) self.assertEqual(out, textwrap.dedent('''\ checking for java... %s checking for jarsigner... %s checking for keytool... %s ''' % (alt_java, alt_jarsigner, alt_keytool))) # If --with-java-bin-path and JAVA_HOME are both set, # --with-java-bin-path takes precedence. config, out, status = self.get_result( command=script, args=['--with-java-bin-path=%s' % mozpath.dirname(alt_java)], extra_paths=paths, environ={ 'PATH': mozpath.dirname(java), 'JAVA_HOME': mozpath.dirname(mozpath.dirname(java)), }) self.assertEqual(status, 0) self.assertEqual(config, { 'JAVA': alt_java, 'JARSIGNER': alt_jarsigner, 'KEYTOOL': alt_keytool, 'MOZ_JAVA_CODE_COVERAGE': False, }) self.assertEqual(out, textwrap.dedent('''\ checking for java... %s checking for jarsigner... %s checking for keytool... %s ''' % (alt_java, alt_jarsigner, alt_keytool))) # --enable-java-coverage should set MOZ_JAVA_CODE_COVERAGE. config, out, status = self.get_result( command=script, args=['--enable-java-coverage'], extra_paths=paths, environ={ 'PATH': mozpath.dirname(java), 'JAVA_HOME': mozpath.dirname(mozpath.dirname(java)), }) self.assertEqual(status, 0) self.assertEqual(config, { 'JAVA': java, 'JARSIGNER': jarsigner, 'KEYTOOL': keytool, 'MOZ_JAVA_CODE_COVERAGE': True, }) # Any missing tool is fatal when these checks run. 
del paths[jarsigner] config, out, status = self.get_result(command=script, extra_paths=paths, environ={ 'PATH': mozpath.dirname(java) }) self.assertEqual(status, 1) self.assertEqual(config, { 'JAVA': java, 'JARSIGNER': ':', }) self.assertEqual(out, textwrap.dedent('''\ checking for java... %s checking for jarsigner... not found ERROR: The program jarsigner was not found. Set $JAVA_HOME to your Java SDK directory or use '--with-java-bin-path={java-bin-dir}' ''' % (java))) def test_pkg_check_modules(self): mock_pkg_config_version = '0.10.0' mock_pkg_config_path = mozpath.abspath('/usr/bin/pkg-config') def mock_pkg_config(_, args): if args[0:2] == ['--errors-to-stdout', '--print-errors']: assert len(args) == 3 package = args[2] if package == 'unknown': return (1, "Package unknown was not found in the pkg-config search path.\n" "Perhaps you should add the directory containing `unknown.pc'\n" "to the PKG_CONFIG_PATH environment variable\n" "No package 'unknown' found", '') if package == 'valid': return 0, '', '' if package == 'new > 1.1': return 1, "Requested 'new > 1.1' but version of new is 1.1", '' if args[0] == '--cflags': assert len(args) == 2 return 0, '-I/usr/include/%s' % args[1], '' if args[0] == '--libs': assert len(args) == 2 return 0, '-l%s' % args[1], '' if args[0] == '--version': return 0, mock_pkg_config_version, '' self.fail("Unexpected arguments to mock_pkg_config: %s" % args) def get_result(cmd, args=[], extra_paths=None): return self.get_result(textwrap.dedent('''\ option('--disable-compile-environment', help='compile env') compile_environment = depends(when='--enable-compile-environment')(lambda: True) toolchain_prefix = depends(when=True)(lambda: None) include('%(topsrcdir)s/build/moz.configure/util.configure') include('%(topsrcdir)s/build/moz.configure/checks.configure') include('%(topsrcdir)s/build/moz.configure/pkg.configure') ''' % {'topsrcdir': topsrcdir}) + cmd, args=args, extra_paths=extra_paths, includes=()) extra_paths = { mock_pkg_config_path: mock_pkg_config, } includes = ('util.configure', 'checks.configure', 'pkg.configure') config, output, status = get_result("pkg_check_modules('MOZ_VALID', 'valid')") self.assertEqual(status, 1) self.assertEqual(output, textwrap.dedent('''\ checking for pkg_config... not found ERROR: *** The pkg-config script could not be found. Make sure it is *** in your path, or set the PKG_CONFIG environment variable *** to the full path to pkg-config. ''')) config, output, status = get_result("pkg_check_modules('MOZ_VALID', 'valid')", extra_paths=extra_paths) self.assertEqual(status, 0) self.assertEqual(output, textwrap.dedent('''\ checking for pkg_config... %s checking for pkg-config version... %s checking for valid... yes checking MOZ_VALID_CFLAGS... -I/usr/include/valid checking MOZ_VALID_LIBS... -lvalid ''' % (mock_pkg_config_path, mock_pkg_config_version))) self.assertEqual(config, { 'PKG_CONFIG': mock_pkg_config_path, 'MOZ_VALID_CFLAGS': ('-I/usr/include/valid',), 'MOZ_VALID_LIBS': ('-lvalid',), }) config, output, status = get_result("pkg_check_modules('MOZ_UKNOWN', 'unknown')", extra_paths=extra_paths) self.assertEqual(status, 1) self.assertEqual(output, textwrap.dedent('''\ checking for pkg_config... %s checking for pkg-config version... %s checking for unknown... no ERROR: Package unknown was not found in the pkg-config search path. 
ERROR: Perhaps you should add the directory containing `unknown.pc' ERROR: to the PKG_CONFIG_PATH environment variable ERROR: No package 'unknown' found ''' % (mock_pkg_config_path, mock_pkg_config_version))) self.assertEqual(config, { 'PKG_CONFIG': mock_pkg_config_path, }) config, output, status = get_result("pkg_check_modules('MOZ_NEW', 'new > 1.1')", extra_paths=extra_paths) self.assertEqual(status, 1) self.assertEqual(output, textwrap.dedent('''\ checking for pkg_config... %s checking for pkg-config version... %s checking for new > 1.1... no ERROR: Requested 'new > 1.1' but version of new is 1.1 ''' % (mock_pkg_config_path, mock_pkg_config_version))) self.assertEqual(config, { 'PKG_CONFIG': mock_pkg_config_path, }) # allow_missing makes missing packages non-fatal. cmd = textwrap.dedent('''\ have_new_module = pkg_check_modules('MOZ_NEW', 'new > 1.1', allow_missing=True) @depends(have_new_module) def log_new_module_error(mod): if mod is not True: log.info('Module not found.') ''') config, output, status = get_result(cmd, extra_paths=extra_paths) self.assertEqual(status, 0) self.assertEqual(output, textwrap.dedent('''\ checking for pkg_config... %s checking for pkg-config version... %s checking for new > 1.1... no WARNING: Requested 'new > 1.1' but version of new is 1.1 Module not found. ''' % (mock_pkg_config_path, mock_pkg_config_version))) self.assertEqual(config, { 'PKG_CONFIG': mock_pkg_config_path, }) config, output, status = get_result(cmd, args=['--disable-compile-environment'], extra_paths=extra_paths) self.assertEqual(status, 0) self.assertEqual(output, 'Module not found.\n') self.assertEqual(config, {}) def mock_old_pkg_config(_, args): if args[0] == '--version': return 0, '0.8.10', '' self.fail("Unexpected arguments to mock_old_pkg_config: %s" % args) extra_paths = { mock_pkg_config_path: mock_old_pkg_config, } config, output, status = get_result("pkg_check_modules('MOZ_VALID', 'valid')", extra_paths=extra_paths) self.assertEqual(status, 1) self.assertEqual(output, textwrap.dedent('''\ checking for pkg_config... %s checking for pkg-config version... 0.8.10 ERROR: *** Your version of pkg-config is too old. You need version 0.9.0 or newer. ''' % mock_pkg_config_path)) def test_simple_keyfile(self): includes = ('util.configure', 'checks.configure', 'keyfiles.configure') config, output, status = self.get_result( "simple_keyfile('Mozilla API')", includes=includes) self.assertEqual(status, 0) self.assertEqual(output, textwrap.dedent('''\ checking for the Mozilla API key... no ''')) self.assertEqual(config, { 'MOZ_MOZILLA_API_KEY': 'no-mozilla-api-key', }) config, output, status = self.get_result( "simple_keyfile('Mozilla API')", args=['--with-mozilla-api-keyfile=/foo/bar/does/not/exist'], includes=includes) self.assertEqual(status, 1) self.assertEqual(output, textwrap.dedent('''\ checking for the Mozilla API key... no ERROR: '/foo/bar/does/not/exist': No such file or directory. ''')) self.assertEqual(config, {}) with MockedOpen({'key': ''}): config, output, status = self.get_result( "simple_keyfile('Mozilla API')", args=['--with-mozilla-api-keyfile=key'], includes=includes) self.assertEqual(status, 1) self.assertEqual(output, textwrap.dedent('''\ checking for the Mozilla API key... no ERROR: 'key' is empty. 
''')) self.assertEqual(config, {}) with MockedOpen({'key': 'fake-key\n'}): config, output, status = self.get_result( "simple_keyfile('Mozilla API')", args=['--with-mozilla-api-keyfile=key'], includes=includes) self.assertEqual(status, 0) self.assertEqual(output, textwrap.dedent('''\ checking for the Mozilla API key... yes ''')) self.assertEqual(config, { 'MOZ_MOZILLA_API_KEY': 'fake-key', }) with MockedOpen({'default': 'default-key\n'}): config, output, status = self.get_result( "simple_keyfile('Mozilla API', default='default')", includes=includes) self.assertEqual(status, 0) self.assertEqual(output, textwrap.dedent('''\ checking for the Mozilla API key... yes ''')) self.assertEqual(config, { 'MOZ_MOZILLA_API_KEY': 'default-key', }) with MockedOpen({'default': 'default-key\n', 'key': 'fake-key\n'}): config, output, status = self.get_result( "simple_keyfile('Mozilla API', default='key')", includes=includes) self.assertEqual(status, 0) self.assertEqual(output, textwrap.dedent('''\ checking for the Mozilla API key... yes ''')) self.assertEqual(config, { 'MOZ_MOZILLA_API_KEY': 'fake-key', }) def test_id_and_secret_keyfile(self): includes = ('util.configure', 'checks.configure', 'keyfiles.configure') config, output, status = self.get_result( "id_and_secret_keyfile('Bing API')", includes=includes) self.assertEqual(status, 0) self.assertEqual(output, textwrap.dedent('''\ checking for the Bing API key... no ''')) self.assertEqual(config, { 'MOZ_BING_API_CLIENTID': 'no-bing-api-clientid', 'MOZ_BING_API_KEY': 'no-bing-api-key', }) config, output, status = self.get_result( "id_and_secret_keyfile('Bing API')", args=['--with-bing-api-keyfile=/foo/bar/does/not/exist'], includes=includes) self.assertEqual(status, 1) self.assertEqual(output, textwrap.dedent('''\ checking for the Bing API key... no ERROR: '/foo/bar/does/not/exist': No such file or directory. ''')) self.assertEqual(config, {}) with MockedOpen({'key': ''}): config, output, status = self.get_result( "id_and_secret_keyfile('Bing API')", args=['--with-bing-api-keyfile=key'], includes=includes) self.assertEqual(status, 1) self.assertEqual(output, textwrap.dedent('''\ checking for the Bing API key... no ERROR: 'key' is empty. ''')) self.assertEqual(config, {}) with MockedOpen({'key': 'fake-id fake-key\n'}): config, output, status = self.get_result( "id_and_secret_keyfile('Bing API')", args=['--with-bing-api-keyfile=key'], includes=includes) self.assertEqual(status, 0) self.assertEqual(output, textwrap.dedent('''\ checking for the Bing API key... yes ''')) self.assertEqual(config, { 'MOZ_BING_API_CLIENTID': 'fake-id', 'MOZ_BING_API_KEY': 'fake-key', }) with MockedOpen({'key': 'fake-key\n'}): config, output, status = self.get_result( "id_and_secret_keyfile('Bing API')", args=['--with-bing-api-keyfile=key'], includes=includes) self.assertEqual(status, 1) self.assertEqual(output, textwrap.dedent('''\ checking for the Bing API key... no ERROR: Bing API key file has an invalid format. ''')) self.assertEqual(config, {}) with MockedOpen({'default-key': 'default-id default-key\n'}): config, output, status = self.get_result( "id_and_secret_keyfile('Bing API', default='default-key')", includes=includes) self.assertEqual(status, 0) self.assertEqual(output, textwrap.dedent('''\ checking for the Bing API key... 
yes ''')) self.assertEqual(config, { 'MOZ_BING_API_CLIENTID': 'default-id', 'MOZ_BING_API_KEY': 'default-key', }) with MockedOpen({'default-key': 'default-id default-key\n', 'key': 'fake-id fake-key\n'}): config, output, status = self.get_result( "id_and_secret_keyfile('Bing API', default='default-key')", args=['--with-bing-api-keyfile=key'], includes=includes) self.assertEqual(status, 0) self.assertEqual(output, textwrap.dedent('''\ checking for the Bing API key... yes ''')) self.assertEqual(config, { 'MOZ_BING_API_CLIENTID': 'fake-id', 'MOZ_BING_API_KEY': 'fake-key', })
def test_java_tool_checks(self): includes = ('util.configure', 'checks.configure', 'java.configure') # A valid set of tools in a standard location. java = mozpath.abspath('/usr/bin/java') javah = mozpath.abspath('/usr/bin/javah') javac = mozpath.abspath('/usr/bin/javac') jar = mozpath.abspath('/usr/bin/jar') jarsigner = mozpath.abspath('/usr/bin/jarsigner') keytool = mozpath.abspath('/usr/bin/keytool') proguard_jar = mozpath.abspath('/path/to/proguard.jar') old_proguard_jar = mozpath.abspath('/path/to/old_proguard.jar') def mock_valid_java(_, args): # Yield valid proguard.jar output with a version based on the given path. stdout = \ 'ProGuard, version {version}' + \ 'Usage: java proguard.ProGuard [options ...]' args = tuple(args) if args == ('-jar', proguard_jar): return 1, stdout.format(version="5.3.3"), '' elif args == ('-jar', old_proguard_jar): return 1, stdout.format(version="4.2"), '' self.fail("Unexpected arguments to mock_valid_java: %s" % args) def mock_valid_javac(_, args): if len(args) == 1 and args[0] == '-version': return 0, '1.8', '' self.fail("Unexpected arguments to mock_valid_javac: %s" % args) paths = { java: mock_valid_java, javah: None, javac: mock_valid_javac, jar: None, jarsigner: None, keytool: None, proguard_jar: mock_valid_java, } config, out, status = self.get_result(includes=includes, extra_paths=paths, environ={ 'PROGUARD_JAR': proguard_jar, }) self.assertEqual( out, textwrap.dedent('''\ checking for java... %s checking for javah... %s checking for jar... %s checking for jarsigner... %s checking for keytool... %s checking for javac... %s checking for javac version... 1.8 checking for proguard.jar version... %s ''' % (java, javah, jar, jarsigner, keytool, javac, proguard_jar))) self.assertEqual(status, 0) self.assertEqual( config, { 'JAVA': java, 'JAVAH': javah, 'JAVAC': javac, 'JAR': jar, 'JARSIGNER': jarsigner, 'KEYTOOL': keytool, 'PROGUARD_JAR': proguard_jar, }) # An alternative valid set of tools referred to by JAVA_HOME. alt_java = mozpath.abspath('/usr/local/bin/java') alt_javah = mozpath.abspath('/usr/local/bin/javah') alt_javac = mozpath.abspath('/usr/local/bin/javac') alt_jar = mozpath.abspath('/usr/local/bin/jar') alt_jarsigner = mozpath.abspath('/usr/local/bin/jarsigner') alt_keytool = mozpath.abspath('/usr/local/bin/keytool') alt_java_home = mozpath.dirname(mozpath.dirname(alt_java)) paths.update({ alt_java: mock_valid_java, alt_javah: None, alt_javac: mock_valid_javac, alt_jar: None, alt_jarsigner: None, alt_keytool: None, }) config, out, status = self.get_result(includes=includes, extra_paths=paths, environ={ 'JAVA_HOME': alt_java_home, 'PATH': mozpath.dirname(java), 'PROGUARD_JAR': proguard_jar, }) self.assertEqual( out, textwrap.dedent('''\ checking for java... %s checking for javah... %s checking for jar... %s checking for jarsigner... %s checking for keytool... %s checking for javac... %s checking for javac version... 1.8 checking for proguard.jar version... %s ''' % (alt_java, alt_javah, alt_jar, alt_jarsigner, alt_keytool, alt_javac, proguard_jar))) self.assertEqual(status, 0) self.assertEqual( config, { 'JAVA': alt_java, 'JAVAH': alt_javah, 'JAVAC': alt_javac, 'JAR': alt_jar, 'JARSIGNER': alt_jarsigner, 'KEYTOOL': alt_keytool, 'PROGUARD_JAR': proguard_jar, }) # We can use --with-java-bin-path instead of JAVA_HOME to similar # effect. 
config, out, status = self.get_result( args=['--with-java-bin-path=%s' % mozpath.dirname(alt_java)], includes=includes, extra_paths=paths, environ={ 'PATH': mozpath.dirname(java), 'PROGUARD_JAR': proguard_jar, }) self.assertEqual( out, textwrap.dedent('''\ checking for java... %s checking for javah... %s checking for jar... %s checking for jarsigner... %s checking for keytool... %s checking for javac... %s checking for javac version... 1.8 checking for proguard.jar version... %s ''' % (alt_java, alt_javah, alt_jar, alt_jarsigner, alt_keytool, alt_javac, proguard_jar))) self.assertEqual(status, 0) self.assertEqual( config, { 'JAVA': alt_java, 'JAVAH': alt_javah, 'JAVAC': alt_javac, 'JAR': alt_jar, 'JARSIGNER': alt_jarsigner, 'KEYTOOL': alt_keytool, 'PROGUARD_JAR': proguard_jar, }) # If --with-java-bin-path and JAVA_HOME are both set, # --with-java-bin-path takes precedence. config, out, status = self.get_result( args=['--with-java-bin-path=%s' % mozpath.dirname(alt_java)], includes=includes, extra_paths=paths, environ={ 'PATH': mozpath.dirname(java), 'JAVA_HOME': mozpath.dirname(mozpath.dirname(java)), 'PROGUARD_JAR': proguard_jar, }) self.assertEqual( out, textwrap.dedent('''\ checking for java... %s checking for javah... %s checking for jar... %s checking for jarsigner... %s checking for keytool... %s checking for javac... %s checking for javac version... 1.8 checking for proguard.jar version... %s ''' % (alt_java, alt_javah, alt_jar, alt_jarsigner, alt_keytool, alt_javac, proguard_jar))) self.assertEqual(status, 0) self.assertEqual( config, { 'JAVA': alt_java, 'JAVAH': alt_javah, 'JAVAC': alt_javac, 'JAR': alt_jar, 'JARSIGNER': alt_jarsigner, 'KEYTOOL': alt_keytool, 'PROGUARD_JAR': proguard_jar, }) def mock_old_javac(_, args): if len(args) == 1 and args[0] == '-version': return 0, '1.6.9', '' self.fail("Unexpected arguments to mock_old_javac: %s" % args) # An old proguard JAR is fatal. config, out, status = self.get_result(includes=includes, extra_paths=paths, environ={ 'PATH': mozpath.dirname(java), 'PROGUARD_JAR': old_proguard_jar, }) self.assertEqual( out, textwrap.dedent('''\ checking for java... %s checking for javah... %s checking for jar... %s checking for jarsigner... %s checking for keytool... %s checking for javac... %s checking for javac version... 1.8 checking for proguard.jar version... ERROR: proguard.jar 5.3.3 or higher is required (looked for %s). Run |mach artifact toolchain --from-build proguard-jar && mv proguard ~/.mozbuild/| or add `export PROGUARD_JAR=/path/to/proguard.jar` to your mozconfig. ''' % (java, javah, jar, jarsigner, keytool, javac, old_proguard_jar))) self.assertEqual(status, 1) self.assertEqual( config, { 'JAVA': java, 'JAVAH': javah, 'JAVAC': javac, 'JAR': jar, 'JARSIGNER': jarsigner, 'KEYTOOL': keytool, }) # An old javac is fatal. paths[javac] = mock_old_javac config, out, status = self.get_result(includes=includes, extra_paths=paths, environ={ 'PATH': mozpath.dirname(java), 'PROGUARD_JAR': proguard_jar, }) self.assertEqual( out, textwrap.dedent('''\ checking for java... %s checking for javah... %s checking for jar... %s checking for jarsigner... %s checking for keytool... %s checking for javac... %s checking for javac version... ERROR: javac 1.8 or higher is required (found 1.6.9). Check the JAVA_HOME environment variable. 
''' % (java, javah, jar, jarsigner, keytool, javac))) self.assertEqual(status, 1) self.assertEqual( config, { 'JAVA': java, 'JAVAH': javah, 'JAVAC': javac, 'JAR': jar, 'JARSIGNER': jarsigner, 'KEYTOOL': keytool, }) # Any missing tool is fatal when these checks run. del paths[jarsigner] config, out, status = self.get_result(includes=includes, extra_paths=paths, environ={ 'PATH': mozpath.dirname(java), 'PROGUARD_JAR': proguard_jar, }) self.assertEqual(status, 1) self.assertEqual(config, { 'JAVA': java, 'JAVAH': javah, 'JAR': jar, 'JARSIGNER': ':', }) self.assertEqual( out, textwrap.dedent('''\ checking for java... %s checking for javah... %s checking for jar... %s checking for jarsigner... not found ERROR: The program jarsigner was not found. Set $JAVA_HOME to your Java SDK directory or use '--with-java-bin-path={java-bin-dir}' ''' % (java, javah, jar)))
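# The mocked tools registered in `paths` throughout these tests follow one
# convention: each entry maps an absolute program path to a callable that
# receives two arguments (the first is unused by these mocks, the second is
# the argument list) and returns a (returncode, stdout, stderr) tuple, which
# stands in for actually running the program. A minimal, self-contained
# sketch of that shape; `fake_tool` and `run_version_check` are illustrative
# names only, not part of the test harness:

def fake_tool(_, args):
    # Pretend to be a tool that only understands `--version`.
    if list(args) == ['--version']:
        return 0, '1.2.3', ''
    return 1, '', 'unexpected arguments: %r' % (args,)


def run_version_check(tool):
    retcode, stdout, stderr = tool(None, ['--version'])
    return retcode, stdout.strip()


assert run_version_check(fake_tool) == (0, '1.2.3')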
def __init__(self, topsrcdir, topobjdir, defines=None, non_global_defines=None, substs=None, source=None, mozconfig=None): if not source: source = mozpath.join(topobjdir, 'config.status') self.source = source self.defines = ReadOnlyDict(defines or {}) self.non_global_defines = non_global_defines or [] self.substs = dict(substs or {}) self.topsrcdir = mozpath.abspath(topsrcdir) self.topobjdir = mozpath.abspath(topobjdir) self.mozconfig = mozpath.abspath(mozconfig) if mozconfig else None self.lib_prefix = self.substs.get('LIB_PREFIX', '') self.rust_lib_prefix = self.substs.get('RUST_LIB_PREFIX', '') if 'LIB_SUFFIX' in self.substs: self.lib_suffix = '.%s' % self.substs['LIB_SUFFIX'] if 'RUST_LIB_SUFFIX' in self.substs: self.rust_lib_suffix = '.%s' % self.substs['RUST_LIB_SUFFIX'] self.dll_prefix = self.substs.get('DLL_PREFIX', '') self.dll_suffix = self.substs.get('DLL_SUFFIX', '') if self.substs.get('IMPORT_LIB_SUFFIX'): self.import_prefix = self.lib_prefix self.import_suffix = '.%s' % self.substs['IMPORT_LIB_SUFFIX'] else: self.import_prefix = self.dll_prefix self.import_suffix = self.dll_suffix self.bin_suffix = self.substs.get('BIN_SUFFIX', '') global_defines = [ name for name in self.defines if not name in self.non_global_defines ] self.substs['ACDEFINES'] = ' '.join([ '-D%s=%s' % (name, shell_quote(self.defines[name]).replace('$', '$$')) for name in sorted(global_defines) ]) def serialize(name, obj): if isinstance(obj, StringTypes): return obj if isinstance(obj, Iterable): return ' '.join(obj) raise Exception('Unhandled type %s for %s', type(obj), str(name)) self.substs['ALLSUBSTS'] = '\n'.join( sorted([ '%s = %s' % (name, serialize(name, self.substs[name])) for name in self.substs if self.substs[name] ])) self.substs['ALLEMPTYSUBSTS'] = '\n'.join( sorted([ '%s =' % name for name in self.substs if not self.substs[name] ])) self.substs = ReadOnlyDict(self.substs) self.external_source_dir = None external = self.substs.get('EXTERNAL_SOURCE_DIR', '') if external: external = mozpath.normpath(external) if not os.path.isabs(external): external = mozpath.join(self.topsrcdir, external) self.external_source_dir = mozpath.normpath(external) # Populate a Unicode version of substs. This is an optimization to make # moz.build reading faster, since each sandbox needs a Unicode version # of these variables and doing it over a thousand times is a hotspot # during sandbox execution! # Bug 844509 tracks moving everything to Unicode. self.substs_unicode = {} def decode(v): if not isinstance(v, text_type): try: return v.decode('utf-8') except UnicodeDecodeError: return v.decode('utf-8', 'replace') for k, v in self.substs.items(): if not isinstance(v, StringTypes): if isinstance(v, Iterable): type(v)(decode(i) for i in v) elif not isinstance(v, text_type): v = decode(v) self.substs_unicode[k] = v self.substs_unicode = ReadOnlyDict(self.substs_unicode)
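# For reference, the ACDEFINES string assembled in the constructor above is
# just the sorted list of -D flags for every define not listed in
# non_global_defines, shell-quoted and with `$` doubled for make. A
# standalone sketch of that derivation; `acdefines` is an illustrative
# helper, and the stdlib `quote` stands in for mozbuild's shell_quote:

try:
    from shlex import quote  # Python 3
except ImportError:
    from pipes import quote  # Python 2


def acdefines(defines, non_global_defines=()):
    global_defines = [name for name in defines if name not in non_global_defines]
    return ' '.join(
        '-D%s=%s' % (name, quote(str(defines[name])).replace('$', '$$'))
        for name in sorted(global_defines)
    )


assert acdefines({'FOO': 1, 'BAR': 'a b'}, ['BAR']) == '-DFOO=1'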
def run_mochitest_general(self, flavor=None, test_objects=None, **kwargs): buildapp = None for app in SUPPORTED_APPS: if is_buildapp_in(app)(self): buildapp = app break flavors = None if flavor: for fname, fobj in ALL_FLAVORS.iteritems(): if flavor in fobj['aliases']: if buildapp not in fobj['enabled_apps']: continue flavors = [fname] break else: flavors = [f for f, v in ALL_FLAVORS.iteritems() if buildapp in v['enabled_apps']] from mozbuild.controller.building import BuildDriver self._ensure_state_subdir_exists('.') driver = self._spawn(BuildDriver) driver.install_tests(remove=False) test_paths = kwargs['test_paths'] kwargs['test_paths'] = [] if test_paths and buildapp == 'b2g': # In B2G there is often a 'gecko' directory, though topsrcdir is actually # elsewhere. This little hack makes test paths like 'gecko/dom' work, even if # GECKO_PATH is set in the .userconfig gecko_path = mozpath.abspath(mozpath.join(kwargs['b2gPath'], 'gecko')) if gecko_path != self.topsrcdir: new_paths = [] for tp in test_paths: if mozpath.abspath(tp).startswith(gecko_path): new_paths.append(mozpath.relpath(tp, gecko_path)) else: new_paths.append(tp) test_paths = new_paths mochitest = self._spawn(MochitestRunner) tests = mochitest.resolve_tests(test_paths, test_objects, cwd=self._mach_context.cwd) subsuite = kwargs.get('subsuite') if subsuite == 'default': kwargs['subsuite'] = None suites = defaultdict(list) unsupported = set() for test in tests: # Filter out non-mochitests and unsupported flavors. if test['flavor'] not in ALL_FLAVORS: continue key = (test['flavor'], test['subsuite']) if test['flavor'] not in flavors: unsupported.add(key) continue if subsuite == 'default': # "--subsuite default" means only run tests that don't have a subsuite if test['subsuite']: unsupported.add(key) continue elif subsuite and test['subsuite'] != subsuite: unsupported.add(key) continue suites[key].append(test) if not suites: # Make it very clear why no tests were found if not unsupported: print(TESTS_NOT_FOUND.format('\n'.join( sorted(list(test_paths or test_objects))))) return 1 msg = [] for f, s in unsupported: fobj = ALL_FLAVORS[f] apps = fobj['enabled_apps'] name = fobj['aliases'][0] if s: name = '{} --subsuite {}'.format(name, s) if buildapp not in apps: reason = 'requires {}'.format(' or '.join(apps)) else: reason = 'excluded by the command line' msg.append(' mochitest -f {} ({})'.format(name, reason)) print(SUPPORTED_TESTS_NOT_FOUND.format( buildapp, '\n'.join(sorted(msg)))) return 1 if buildapp in ('b2g', 'b2g_desktop'): run_mochitest = mochitest.run_b2g_test elif buildapp == 'android': run_mochitest = mochitest.run_android_test else: run_mochitest = mochitest.run_desktop_test overall = None for (flavor, subsuite), tests in sorted(suites.items()): fobj = ALL_FLAVORS[flavor] msg = fobj['aliases'][0] if subsuite: msg = '{} with subsuite {}'.format(msg, subsuite) print(NOW_RUNNING.format(msg)) harness_args = kwargs.copy() harness_args['subsuite'] = subsuite harness_args.update(fobj.get('extra_args', {})) result = run_mochitest( self._mach_context, tests=tests, suite=fobj['suite'], **harness_args) if result: overall = result # TODO consolidate summaries from all suites return overall
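# The suite-grouping logic above reduces to bucketing the resolved tests by a
# (flavor, subsuite) key while dropping flavors that are not enabled for the
# current build app. A stripped-down sketch of that filter; the flavor names
# and test dicts below are made up for illustration and `group_tests` is not
# part of the mach command:

from collections import defaultdict


def group_tests(tests, enabled_flavors, subsuite=None):
    suites = defaultdict(list)
    for test in tests:
        if test['flavor'] not in enabled_flavors:
            continue
        if subsuite and test.get('subsuite') != subsuite:
            continue
        suites[(test['flavor'], test.get('subsuite'))].append(test)
    return suites


sample = [
    {'flavor': 'plain', 'subsuite': None, 'path': 'a.html'},
    {'flavor': 'browser-chrome', 'subsuite': 'devtools', 'path': 'b.js'},
]
grouped = group_tests(sample, {'plain', 'browser-chrome'})
assert sorted(grouped) == [('browser-chrome', 'devtools'), ('plain', None)]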
def consume_object(self, obj):
    if isinstance(obj, JARManifest) and \
            obj.install_target.startswith('dist/bin'):
        self._consume_jar_manifest(obj)
    elif isinstance(obj, (FinalTargetFiles, FinalTargetPreprocessedFiles)) and \
            obj.install_target.startswith('dist/bin'):
        ab_cd = self.environment.substs['MOZ_UI_LOCALE'][0]
        localized = isinstance(
            obj, (LocalizedFiles, LocalizedPreprocessedFiles))
        defines = obj.defines or {}
        if defines:
            defines = defines.defines
        for path, files in obj.files.walk():
            for f in files:
                # For localized files we need to find the file from the locale directory.
                if (localized and not isinstance(f, ObjDirPath) and ab_cd != 'en-US'):
                    src = self.localized_path(obj.relsrcdir, f)
                    dep_target = 'install-%s' % obj.install_target
                    if '*' not in src:
                        merge = mozpath.abspath(
                            mozpath.join(self.environment.topobjdir,
                                         'l10n_merge', obj.relsrcdir, f))
                        self._l10n_dependencies[dep_target].append(
                            (merge, f.full_path, src))
                        src = merge
                else:
                    src = f.full_path
                if isinstance(obj, FinalTargetPreprocessedFiles):
                    self._add_preprocess(obj, src, path,
                                         target=f.target_basename,
                                         defines=defines)
                elif '*' in f:
                    def _prefix(s):
                        for p in mozpath.split(s):
                            if '*' not in p:
                                yield p + '/'
                    prefix = ''.join(_prefix(src))
                    if '*' in f.target_basename:
                        target = path
                    else:
                        target = mozpath.join(path, f.target_basename)
                    self._install_manifests[obj.install_target] \
                        .add_pattern_link(
                            prefix, src[len(prefix):], target)
                else:
                    self._install_manifests[obj.install_target].add_link(
                        src, mozpath.join(path, f.target_basename))
                if isinstance(f, ObjDirPath):
                    dep_target = 'install-%s' % obj.install_target
                    dep = mozpath.relpath(f.full_path,
                                          self.environment.topobjdir)
                    if dep in self._generated_files_map:
                        # Only the first output file is specified as a
                        # dependency. If there are multiple output files
                        # from a single GENERATED_FILES invocation that are
                        # installed, we only want to run the command once.
                        dep = self._generated_files_map[dep]
                    self._dependencies[dep_target].append(dep)
    elif isinstance(obj, ChromeManifestEntry) and \
            obj.install_target.startswith('dist/bin'):
        top_level = mozpath.join(obj.install_target, 'chrome.manifest')
        if obj.path != top_level:
            entry = 'manifest %s' % mozpath.relpath(
                obj.path, obj.install_target)
            self._manifest_entries[top_level].add(entry)
        self._manifest_entries[obj.path].add(str(obj.entry))
    elif isinstance(obj, GeneratedFile):
        if obj.outputs:
            first_output = mozpath.relpath(
                mozpath.join(obj.objdir, obj.outputs[0]),
                self.environment.topobjdir)
            for o in obj.outputs[1:]:
                fullpath = mozpath.join(obj.objdir, o)
                self._generated_files_map[mozpath.relpath(
                    fullpath, self.environment.topobjdir)] = first_output
        self._generated_files.append(obj)
        return False
    elif isinstance(obj, XPIDLModule):
        self._has_xpidl = True
        # We're not actually handling XPIDL files.
        return False
    else:
        return False
    return True
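# The wildcard branch in consume_object splits an install source into its
# literal directory prefix and the glob remainder before registering a
# pattern link. The same prefix computation in isolation; `literal_prefix` is
# an illustrative stand-in that stops at the first wildcard component, which
# is the common case in practice:

import mozpack.path as mozpath


def literal_prefix(pattern):
    parts = []
    for p in mozpath.split(pattern):
        if '*' in p:
            break
        parts.append(p + '/')
    return ''.join(parts)


prefix = literal_prefix('res/fonts/*.ttf')
assert (prefix, 'res/fonts/*.ttf'[len(prefix):]) == ('res/fonts/', '*.ttf')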
def getmount(self, path):
    while path != '/' and not os.path.ismount(path):
        path = mozpath.abspath(mozpath.join(path, os.pardir))
    return path
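# getmount() above walks up one path component at a time until
# os.path.ismount reports a mount point, so any path on the root filesystem
# eventually resolves to '/'. A small usage sketch with a module-level copy
# of the same loop (assumes a POSIX-style layout; the printed mount point
# depends on the machine):

import os
import mozpack.path as mozpath


def getmount(path):
    while path != '/' and not os.path.ismount(path):
        path = mozpath.abspath(mozpath.join(path, os.pardir))
    return path


print(getmount(os.getcwd()))  # e.g. '/' or '/home', depending on mounts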
def process_gyp_result(gyp_result, gyp_dir_attrs, path, config, output, non_unified_sources, action_overrides): flat_list, targets, data = gyp_result no_chromium = gyp_dir_attrs.no_chromium no_unified = gyp_dir_attrs.no_unified # Process all targets from the given gyp files and its dependencies. # The path given to AllTargets needs to use os.sep, while the frontend code # gives us paths normalized with forward slash separator. for target in gyp.common.AllTargets(flat_list, targets, path.replace('/', os.sep)): build_file, target_name, toolset = gyp.common.ParseQualifiedTarget(target) # Each target is given its own objdir. The base of that objdir # is derived from the relative path from the root gyp file path # to the current build_file, placed under the given output # directory. Since several targets can be in a given build_file, # separate them in subdirectories using the build_file basename # and the target_name. reldir = mozpath.relpath(mozpath.dirname(build_file), mozpath.dirname(path)) subdir = '%s_%s' % ( mozpath.splitext(mozpath.basename(build_file))[0], target_name, ) # Emit a context for each target. context = GypContext(config, mozpath.relpath( mozpath.join(output, reldir, subdir), config.topobjdir)) context.add_source(mozpath.abspath(build_file)) # The list of included files returned by gyp are relative to build_file for f in data[build_file]['included_files']: context.add_source(mozpath.abspath(mozpath.join( mozpath.dirname(build_file), f))) spec = targets[target] # Derive which gyp configuration to use based on MOZ_DEBUG. c = 'Debug' if config.substs.get('MOZ_DEBUG') else 'Release' if c not in spec['configurations']: raise RuntimeError('Missing %s gyp configuration for target %s ' 'in %s' % (c, target_name, build_file)) target_conf = spec['configurations'][c] if 'actions' in spec: handle_actions(spec['actions'], context, action_overrides) if 'copies' in spec: handle_copies(spec['copies'], context) use_libs = [] libs = [] def add_deps(s): for t in s.get('dependencies', []) + s.get('dependencies_original', []): ty = targets[t]['type'] if ty in ('static_library', 'shared_library'): l = targets[t]['target_name'] if l not in use_libs: use_libs.append(l) # Manually expand out transitive dependencies-- # gyp won't do this for static libs or none targets. if ty in ('static_library', 'none'): add_deps(targets[t]) libs.extend(spec.get('libraries', [])) # XXX: this sucks, but webrtc breaks with this right now because # it builds a library called 'gtest' and we just get lucky # that it isn't in USE_LIBS by that name anywhere. if no_chromium: add_deps(spec) os_libs = [] for l in libs: if l.startswith('-'): if l not in os_libs: os_libs.append(l) elif l.endswith('.lib'): l = l[:-4] if l not in os_libs: os_libs.append(l) elif l: # For library names passed in from moz.build. l = os.path.basename(l) if l not in use_libs: use_libs.append(l) if spec['type'] == 'none': if not ('actions' in spec or 'copies' in spec): continue elif spec['type'] in ('static_library', 'shared_library', 'executable'): # Remove leading 'lib' from the target_name if any, and use as # library name. 
name = six.ensure_text(spec['target_name']) if spec['type'] in ('static_library', 'shared_library'): if name.startswith('lib'): name = name[3:] context['LIBRARY_NAME'] = name else: context['PROGRAM'] = name if spec['type'] == 'shared_library': context['FORCE_SHARED_LIB'] = True elif spec['type'] == 'static_library' and \ spec.get('variables', {}).get('no_expand_libs', '0') == '1': # PSM links a NSS static library, but our folded libnss # doesn't actually export everything that all of the # objects within would need, so that one library # should be built as a real static library. context['NO_EXPAND_LIBS'] = True if use_libs: context['USE_LIBS'] = sorted(use_libs, key=lambda s: s.lower()) if os_libs: context['OS_LIBS'] = os_libs # gyp files contain headers and asm sources in sources lists. sources = [] unified_sources = [] extensions = set() use_defines_in_asflags = False for f in spec.get('sources', []): ext = mozpath.splitext(f)[-1] extensions.add(ext) if f.startswith('$INTERMEDIATE_DIR/'): s = ObjDirPath(context, f.replace('$INTERMEDIATE_DIR/', '!')) else: s = SourcePath(context, f) if ext == '.h': continue if ext == '.def': context['SYMBOLS_FILE'] = s elif ext != '.S' and not no_unified and s not in non_unified_sources: unified_sources.append(s) else: sources.append(s) # The Mozilla build system doesn't use DEFINES for building # ASFILES. if ext == '.s': use_defines_in_asflags = True # The context expects alphabetical order when adding sources context['SOURCES'] = alphabetical_sorted(sources) context['UNIFIED_SOURCES'] = alphabetical_sorted(unified_sources) defines = target_conf.get('defines', []) if config.substs['CC_TYPE'] == 'clang-cl' and no_chromium: msvs_settings = gyp.msvs_emulation.MsvsSettings(spec, {}) defines.extend(msvs_settings.GetComputedDefines(c)) for define in defines: if '=' in define: name, value = define.split('=', 1) context['DEFINES'][name] = value else: context['DEFINES'][define] = True product_dir_dist = '$PRODUCT_DIR/dist/' for include in target_conf.get('include_dirs', []): if include.startswith(product_dir_dist): # special-case includes of <(PRODUCT_DIR)/dist/ to match # handle_copies above. This is used for NSS' exports. include = '!/dist/include/' + include[len(product_dir_dist):] elif include.startswith(config.topobjdir): # NSPR_INCLUDE_DIR gets passed into the NSS build this way. include = '!/' + mozpath.relpath(include, config.topobjdir) else: # moz.build expects all LOCAL_INCLUDES to exist, so ensure they do. # # NB: gyp files sometimes have actual absolute paths (e.g. # /usr/include32) and sometimes paths that moz.build considers # absolute, i.e. starting from topsrcdir. There's no good way # to tell them apart here, and the actual absolute paths are # likely bogus. In any event, actual absolute paths will be # filtered out by trying to find them in topsrcdir. # # We do allow !- and %-prefixed paths, assuming they come # from moz.build and will be handled the same way as if they # were given to LOCAL_INCLUDES in moz.build. 
if include.startswith('/'): resolved = mozpath.abspath(mozpath.join(config.topsrcdir, include[1:])) elif not include.startswith(('!', '%')): resolved = mozpath.abspath(mozpath.join( mozpath.dirname(build_file), include)) if not include.startswith(('!', '%')) and not os.path.exists(resolved): continue context['LOCAL_INCLUDES'] += [include] context['ASFLAGS'] = target_conf.get('asflags_mozilla', []) if use_defines_in_asflags and defines: context['ASFLAGS'] += ['-D' + d for d in defines] if config.substs['OS_TARGET'] == 'SunOS': context['LDFLAGS'] = target_conf.get('ldflags', []) flags = target_conf.get('cflags_mozilla', []) if flags: suffix_map = { '.c': 'CFLAGS', '.cpp': 'CXXFLAGS', '.cc': 'CXXFLAGS', '.m': 'CMFLAGS', '.mm': 'CMMFLAGS', } variables = ( suffix_map[e] for e in extensions if e in suffix_map ) for var in variables: for f in flags: # We may be getting make variable references out of the # gyp data, and we don't want those in emitted data, so # substitute them with their actual value. f = expand_variables(f, config.substs).split() if not f: continue # the result may be a string or a list. if isinstance(f, six.string_types): context[var].append(f) else: context[var].extend(f) else: # Ignore other types because we don't have # anything using them, and we're not testing them. They can be # added when that becomes necessary. raise NotImplementedError('Unsupported gyp target type: %s' % spec['type']) if not no_chromium: # Add some features to all contexts. Put here in case LOCAL_INCLUDES # order matters. context['LOCAL_INCLUDES'] += [ '!/ipc/ipdl/_ipdlheaders', '/ipc/chromium/src', '/ipc/glue', ] # These get set via VC project file settings for normal GYP builds. if config.substs['OS_TARGET'] == 'WINNT': context['DEFINES']['UNICODE'] = True context['DEFINES']['_UNICODE'] = True context['COMPILE_FLAGS']['OS_INCLUDES'] = [] for key, value in gyp_dir_attrs.sandbox_vars.items(): if context.get(key) and isinstance(context[key], list): # If we have a key from sanbox_vars that's also been # populated here we use the value from sandbox_vars as our # basis rather than overriding outright. context[key] = value + context[key] elif context.get(key) and isinstance(context[key], dict): context[key].update(value) else: context[key] = value yield context
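# The cflags_mozilla handling at the end of process_gyp_result boils down to
# a static mapping from source suffix to the moz.build flags variable that
# should receive those flags. A tiny standalone illustration of that
# dispatch; the plain-dict `context` and the `route_flags` helper are made up
# for the example and are not part of the gyp reader:

SUFFIX_MAP = {
    '.c': 'CFLAGS',
    '.cpp': 'CXXFLAGS',
    '.cc': 'CXXFLAGS',
    '.m': 'CMFLAGS',
    '.mm': 'CMMFLAGS',
}


def route_flags(extensions, flags):
    context = {var: [] for var in set(SUFFIX_MAP.values())}
    for var in set(SUFFIX_MAP[e] for e in extensions if e in SUFFIX_MAP):
        context[var].extend(flags)
    return context


routed = route_flags({'.cpp', '.c', '.h'}, ['-fno-exceptions'])
assert routed['CXXFLAGS'] == ['-fno-exceptions']
assert routed['CMMFLAGS'] == []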
from __future__ import absolute_import, unicode_literals

import json

import mozunit
import mozpack.path as mozpath
import pytest

from mozlint.result import Issue, ResultSummary
from mozlint import formatters

NORMALISED_PATHS = {
    "abc": mozpath.normpath("a/b/c.txt"),
    "def": mozpath.normpath("d/e/f.txt"),
    "root": mozpath.abspath("/fake/root"),
}

EXPECTED = {
    "compact": {
        "kwargs": {},
        "format": """
/fake/root/a/b/c.txt: line 1, Error - oh no foo (foo)
/fake/root/a/b/c.txt: line 4, col 10, Error - oh no baz (baz)
/fake/root/a/b/c.txt: line 5, Error - oh no foo-diff (foo-diff)
/fake/root/d/e/f.txt: line 4, col 2, Warning - oh no bar (bar-not-allowed)

4 problems
""".strip(),
    },
    "stylish": {
def __init__(self, paths):
    self._paths = set(mozpath.abspath(p) for p in paths)
import mozpack.path as mozpath from mozwebidlcodegen import ( WebIDLCodegenManager, WebIDLCodegenManagerState, ) from mozfile import NamedTemporaryFile from mozunit import ( MockedOpen, main, ) OUR_DIR = mozpath.abspath(mozpath.dirname(__file__)) TOPSRCDIR = mozpath.normpath(mozpath.join(OUR_DIR, '..', '..', '..', '..')) class TestWebIDLCodegenManager(unittest.TestCase): TEST_STEMS = { 'Child', 'Parent', 'ExampleBinding', 'TestEvent', } @property def _static_input_paths(self): s = { mozpath.join(OUR_DIR, p)