def get_options(self) -> 'OptionDictType':
    """Return the compiler options, extending the Fortran 'std' choices
    with the language standards this GCC release actually supports."""
    opts = FortranCompiler.get_options(self)
    supported = ['legacy', 'f95', 'f2003']
    # Each newer standard is gated on the GCC release that introduced it.
    for introduced_in, std in (('>=4.4.0', 'f2008'), ('>=8.0.0', 'f2018')):
        if version_compare(self.version, introduced_in):
            supported.append(std)
    opts['std'].choices = ['none'] + supported  # type: ignore
    return opts
def get_options(self) -> 'KeyedOptionDictType':
    """Return the compiler options, extending the 'std' option's choices
    with the Fortran standards supported by this compiler version."""
    opts = FortranCompiler.get_options(self)
    supported = ['legacy', 'f95', 'f2003']
    # Gate each newer standard on the compiler release that added it.
    for introduced_in, std in (('>=4.4.0', 'f2008'), ('>=8.0.0', 'f2018')):
        if version_compare(self.version, introduced_in):
            supported.append(std)
    key = OptionKey('std', machine=self.for_machine, lang=self.language)
    opts[key].choices = ['none'] + supported
    return opts
def get_options(self):
    """Return the compiler options, adding a 'fortran_std' combo option
    whose choices depend on the compiler version."""
    opts = FortranCompiler.get_options(self)
    supported = ['legacy', 'f95', 'f2003']
    # Only offer standards the installed compiler release understands.
    for introduced_in, std in (('>=4.4.0', 'f2008'), ('>=8.0.0', 'f2018')):
        if version_compare(self.version, introduced_in):
            supported.append(std)
    opts['fortran_std'] = coredata.UserComboOption(
        'Fortran language standard to use', ['none'] + supported, 'none')
    return opts
def test_modules(self):
    """Build the C++ modules unit test; requires the Ninja backend and a
    sufficiently new Visual Studio (VSCMD_VER >= 16.10)."""
    if self.backend is not Backend.ninja:
        raise SkipTest(f'C++ modules only work with the Ninja backend (not {self.backend.name}).')
    vs_version = os.environ.get('VSCMD_VER')
    if vs_version is None:
        # VSCMD_VER is set by the VS developer command prompt.
        raise SkipTest('C++ modules is only supported with Visual Studio.')
    if version_compare(vs_version, '<16.10.0'):
        raise SkipTest('C++ modules are only supported with VS 2019 Preview or newer.')
    testdir = os.path.join(self.unit_test_dir, '85 cpp modules')
    self.init(testdir)
    self.build()
def check_mypy() -> None:
    """Exit with status 1 unless a sufficiently new mypy is importable."""
    try:
        import mypy  # noqa: F401 -- imported only to probe availability
    except ImportError:
        print('Failed import mypy')
        sys.exit(1)
    from mypy.version import __version__ as installed_version
    # Older mypy releases produce spurious diagnostics on this codebase.
    if not version_compare(installed_version, '>=0.812'):
        print('mypy >=0.812 is required, older versions report spurious errors')
        sys.exit(1)
def _git_create_repo(self, path):
    """Initialise *path* as a git repository on branch 'master' and make
    an initial commit of the generated project."""
    # If a user has git configuration init.defaultBranch set we want to override that
    with tempfile.TemporaryDirectory() as scratch_dir:
        version_output = git(['--version'], str(scratch_dir))[1]
    supports_initial_branch = version_compare(search_version(version_output), '>= 2.28')
    extra_cmd = ['--initial-branch', 'master'] if supports_initial_branch else []
    self._create_project(path)
    self._git(['init'] + extra_cmd, path)
    self._git_config(path)
    self._git(['add', '.'], path)
    self._git(['commit', '--no-gpg-sign', '-m', 'Initial commit'], path)
def generate_native_headers(
        self, state: ModuleState, args: T.Tuple[T.List[mesonlib.FileOrString]],
        kwargs: T.Dict[str, T.Optional[str]]) -> ModuleReturnValue:
    """Create a CustomTarget that runs ``javac -h`` to generate JNI native
    headers for the given classes.

    One ``.h`` output is declared per class; the optional ``package`` kwarg
    is prefixed onto every header name, with dots mapped to underscores.
    """
    classes = T.cast('T.List[str]', kwargs.get('classes'))
    package = kwargs.get('package')

    headers: T.List[str] = []
    for clazz in classes:
        sanitized = clazz.replace(".", "_")
        # javac emits <package>_<class>.h, dots replaced with underscores.
        if package:
            headers.append(f'{package.replace(".", "_")}_{sanitized}.h')
        else:
            headers.append(f'{sanitized}.h')

    javac = self.__get_java_compiler(state)

    command = mesonlib.listify([
        javac.exelist,
        '-d',
        '@PRIVATE_DIR@',
        '-h',
        state.subdir,
        '@INPUT@',
    ])

    # Name the target after the package when given, else the first class.
    prefix = package if package else classes[0]

    target = CustomTarget(f'{prefix}-native-headers',
                          state.subdir,
                          state.subproject,
                          state.environment,
                          command,
                          sources=args[0],
                          outputs=headers,
                          backend=state.backend)

    # It is only known that 1.8.0 won't pre-create the directory. 11 and 16
    # do not exhibit this behavior.
    if version_compare(javac.version, '1.8.0'):
        pathlib.Path(state.backend.get_target_private_dir_abs(target)).mkdir(
            parents=True, exist_ok=True)

    return ModuleReturnValue(target, [target])
def generate_native_header(
        self, state: ModuleState, args: T.Tuple[T.Union[str, FileHolder]],
        kwargs: T.Dict[str, T.Optional[str]]) -> ModuleReturnValue:
    """Create a CustomTarget running ``javac -h`` on a single Java source
    file to emit its JNI header, optionally prefixed by ``package``."""
    assert state.backend

    package = kwargs.get('package')

    # Unwrap FileHolder arguments, then coerce plain strings to File objects.
    raw_sources = [a.held_object if isinstance(a, FileHolder) else a for a in args]
    file = self.interpreter.source_strings_to_files(raw_sources)[0]

    if package:
        header = f'{package.replace(".", "_")}_{pathlib.Path(file.fname).stem}.h'
    else:
        header = f'{pathlib.Path(file.fname).stem}.h'

    ct_kwargs = {
        'input': file,
        'output': header,
        'command': [
            self.javac.exelist[0],
            '-d',
            '@PRIVATE_DIR@',
            '-h',
            state.subdir,
            '@INPUT@',
        ]
    }

    target = CustomTarget(os.path.basename(header),
                          state.subdir,
                          state.subproject,
                          backend=state.backend,
                          kwargs=ct_kwargs)

    # It is only known that 1.8.0 won't pre-create the directory. 11 and 16
    # do not exhibit this behavior.
    if version_compare(self.javac.version, '1.8.0'):
        pathlib.Path(state.backend.get_target_private_dir_abs(target)).mkdir(
            parents=True, exist_ok=True)

    return ModuleReturnValue(target, [target])
def generate_native_header(
        self, state: ModuleState, args: T.Tuple[T.Union[str, mesonlib.File]],
        kwargs: T.Dict[str, T.Optional[str]]) -> ModuleReturnValue:
    """Create a CustomTarget running ``javac -h`` on one source file to
    emit its JNI header, optionally prefixed by ``package``."""
    package = kwargs.get('package')

    source = args[0]
    if not isinstance(source, mesonlib.File):
        # Promote a plain string path to a File rooted at the current subdir.
        source = mesonlib.File.from_source_file(state.source_root, state.subdir, source)
    file = source

    if package:
        header = f'{package.replace(".", "_")}_{pathlib.Path(file.fname).stem}.h'
    else:
        header = f'{pathlib.Path(file.fname).stem}.h'

    javac = self.__get_java_compiler(state)

    command = mesonlib.listify([
        javac.exelist,
        '-d',
        '@PRIVATE_DIR@',
        '-h',
        state.subdir,
        '@INPUT@',
    ])

    target = CustomTarget(
        os.path.basename(header),
        state.subdir,
        state.subproject,
        state.environment,
        command,
        [file],
        [header],
        backend=state.backend,
    )

    # It is only known that 1.8.0 won't pre-create the directory. 11 and 16
    # do not exhibit this behavior.
    if version_compare(javac.version, '1.8.0'):
        pathlib.Path(state.backend.get_target_private_dir_abs(target)).mkdir(
            parents=True, exist_ok=True)

    return ModuleReturnValue(target, [target])
def get_nagfor_quiet(version: str) -> T.List[str]:
    """Return the '-quiet' flag for nagfor releases that accept it (>= 7100),
    otherwise an empty argument list."""
    if version_compare(version, '>=7100'):
        return ['-quiet']
    return []
# Module-level ninja detection: resolve NINJA_CMD and whether it is >= 1.9
# before any tests run; abort immediately if no usable ninja is found.
NINJA_1_9_OR_NEWER = False
NINJA_CMD = None
# If we're on CI, just assume we have ninja in PATH and it's new enough because
# we provide that. This avoids having to detect ninja for every subprocess unit
# test that we run.
if 'CI' in os.environ:
    NINJA_1_9_OR_NEWER = True
    NINJA_CMD = 'ninja'
else:
    # Look for 1.9 to see if https://github.com/ninja-build/ninja/issues/1219
    # is fixed, else require 1.6 for -w dupbuild=err
    for v in ('1.9', '1.6'):
        NINJA_CMD = detect_ninja(v)
        if NINJA_CMD is not None:
            if mesonlib.version_compare(v, '>=1.9'):
                NINJA_1_9_OR_NEWER = True
            else:
                # Old-but-usable ninja: warn once and keep it.
                mlog.warning('Found ninja <1.9, tests will run slower', once=True)
            break

# Hard requirement: the test suite cannot run at all without ninja >= 1.6.
if NINJA_CMD is None:
    raise RuntimeError('Could not find Ninja v1.6 or newer')

def guess_backend(backend, msbuild_exe: str):
    # Auto-detect backend if unspecified
    backend_flags = []
    if backend is None:
        # NOTE(review): this definition is truncated at the chunk boundary;
        # the remainder of guess_backend lies outside the visible SOURCE.
        if msbuild_exe is not None and (mesonlib.is_windows() and not _using_intelcl()):
def gather_tests(testdir: Path) -> T.List[TestDef]:
    """Collect all test definitions found in *testdir*.

    Each immediate subdirectory (dot-directories excluded) is one test. If a
    directory contains a ``test.json``, its 'env', 'installed',
    'do_not_set_opts', 'tools' and 'matrix' entries are applied; a 'matrix'
    entry expands the directory into one TestDef per option combination.

    Returns the sorted list of TestDef objects.
    """
    tests = [t.name for t in testdir.iterdir() if t.is_dir()]
    tests = [t for t in tests if not t.startswith('.')]  # Filter non-tests files (dot files, etc)
    test_defs = [TestDef(testdir / t, None, []) for t in tests]
    all_tests = []  # type: T.List[TestDef]
    for t in test_defs:
        test_def_file = t.path / 'test.json'
        if not test_def_file.is_file():
            all_tests += [t]
            continue

        # test.json is JSON, which is UTF-8 by specification; be explicit so
        # the read does not depend on the locale's preferred encoding.
        test_def = json.loads(test_def_file.read_text(encoding='utf-8'))

        # Handle additional environment variables
        env = {}  # type: T.Dict[str, str]
        if 'env' in test_def:
            assert isinstance(test_def['env'], dict)
            env = test_def['env']
            for key, val in env.items():
                # Substitute the test's own directory into env values.
                val = val.replace('@ROOT@', t.path.resolve().as_posix())
                env[key] = val

        # Handle installed files
        installed = []  # type: T.List[InstalledFile]
        if 'installed' in test_def:
            installed = [InstalledFile(x) for x in test_def['installed']]

        # Handle the do_not_set_opts list
        do_not_set_opts = test_def.get('do_not_set_opts', [])  # type: T.List[str]

        # Skip tests if the tool requirements are not met
        if 'tools' in test_def:
            assert isinstance(test_def['tools'], dict)
            for tool, vers_req in test_def['tools'].items():
                if tool not in tool_vers_map:
                    t.skip = True
                elif not mesonlib.version_compare(tool_vers_map[tool], vers_req):
                    t.skip = True

        # Skip the matrix code and just update the existing test
        if 'matrix' not in test_def:
            t.env.update(env)
            t.installed_files = installed
            t.do_not_set_opts = do_not_set_opts
            all_tests += [t]
            continue

        # 'matrix' entry is present, so build multiple tests from matrix definition
        opt_list = []  # type: T.List[T.List[T.Tuple[str, bool]]]
        matrix = test_def['matrix']
        assert "options" in matrix
        for key, val in matrix["options"].items():
            assert isinstance(val, list)
            tmp_opts = []  # type: T.List[T.Tuple[str, bool]]
            for i in val:
                assert isinstance(i, dict)
                assert "val" in i
                skip = False

                # Add an empty matrix entry
                if i['val'] is None:
                    tmp_opts += [(None, False)]
                    continue

                # Skip the matrix entry if environment variable is present
                if 'skip_on_env' in i:
                    for skip_env_var in i['skip_on_env']:
                        if skip_env_var in os.environ:
                            skip = True

                # Only run the test if all compiler ID's match
                if 'compilers' in i:
                    for lang, id_list in i['compilers'].items():
                        if lang not in compiler_id_map or compiler_id_map[lang] not in id_list:
                            skip = True
                            break

                tmp_opts += [('{}={}'.format(key, i['val']), skip)]

            if opt_list:
                # Cartesian product: extend every existing combination with
                # each value of the current option.
                new_opt_list = []  # type: T.List[T.List[T.Tuple[str, bool]]]
                for i in opt_list:
                    for j in tmp_opts:
                        new_opt_list += [[*i, j]]
                opt_list = new_opt_list
            else:
                opt_list = [[x] for x in tmp_opts]

        # Exclude specific configurations
        if 'exclude' in matrix:
            assert isinstance(matrix['exclude'], list)
            new_opt_list = []  # type: T.List[T.List[T.Tuple[str, bool]]]
            for i in opt_list:
                exclude = False
                opt_names = [x[0] for x in i]
                for j in matrix['exclude']:
                    ex_list = ['{}={}'.format(k, v) for k, v in j.items()]
                    # A combination is excluded when it contains every
                    # key=value pair of an exclude entry.
                    if all(x in opt_names for x in ex_list):
                        exclude = True
                        break
                if not exclude:
                    new_opt_list += [i]
            opt_list = new_opt_list

        for i in opt_list:
            name = ' '.join([x[0] for x in i if x[0] is not None])
            opts = ['-D' + x[0] for x in i if x[0] is not None]
            skip = any(x[1] for x in i)
            test = TestDef(t.path, name, opts, skip or t.skip)
            test.env.update(env)
            test.installed_files = installed
            test.do_not_set_opts = do_not_set_opts
            all_tests += [test]

    return sorted(all_tests)
def coverage(outputs: T.List[str], source_root: str, subproject_root: str,
             build_root: str, log_dir: str, use_llvm_cov: bool) -> int:
    """Generate coverage reports with gcovr and/or lcov+genhtml.

    :param outputs: report kinds to produce ('xml', 'sonarqube', 'text',
                    'html'); an empty list means "whatever the available
                    tools can produce".
    :param source_root: project source directory.
    :param subproject_root: subprojects directory, excluded from reports.
    :param build_root: build directory containing the coverage data.
    :param log_dir: directory that receives the generated reports.
    :param use_llvm_cov: use ``llvm-cov gcov`` instead of gcov.
    :returns: 0 on success, 1 if a requested report could not be produced.
    """
    outfiles = []
    exitcode = 0

    (gcovr_exe, gcovr_version, lcov_exe, genhtml_exe, llvm_cov_exe) = environment.find_coverage_tools()

    # load config files for tools if available in the source tree
    # - lcov requires manually specifying a per-project config
    # - gcovr picks up the per-project config, and also supports filtering files
    #   so don't exclude subprojects ourselves, if the project has a config,
    #   because they either don't want that, or should set it themselves
    lcovrc = os.path.join(source_root, '.lcovrc')
    if os.path.exists(lcovrc):
        lcov_config = ['--config-file', lcovrc]
    else:
        lcov_config = []

    gcovr_config = ['-e', re.escape(subproject_root)]

    # gcovr >= 4.2 requires a different syntax for out of source builds
    if gcovr_exe and mesonlib.version_compare(gcovr_version, '>=4.2'):
        gcovr_base_cmd = [gcovr_exe, '-r', source_root, build_root]
        # it also started supporting the config file
        if os.path.exists(os.path.join(source_root, 'gcovr.cfg')):
            gcovr_config = []
    else:
        gcovr_base_cmd = [gcovr_exe, '-r', build_root]

    if use_llvm_cov:
        gcov_exe_args = ['--gcov-executable', llvm_cov_exe + ' gcov']
    else:
        gcov_exe_args = []

    if not outputs or 'xml' in outputs:
        if gcovr_exe and mesonlib.version_compare(gcovr_version, '>=3.3'):
            subprocess.check_call(gcovr_base_cmd + gcovr_config +
                                  ['-x',
                                   '-o', os.path.join(log_dir, 'coverage.xml')
                                   ] + gcov_exe_args)
            outfiles.append(('Xml', pathlib.Path(log_dir, 'coverage.xml')))
        elif outputs:
            print('gcovr >= 3.3 needed to generate Xml coverage report')
            exitcode = 1

    if not outputs or 'sonarqube' in outputs:
        if gcovr_exe and mesonlib.version_compare(gcovr_version, '>=4.2'):
            subprocess.check_call(gcovr_base_cmd + gcovr_config +
                                  ['--sonarqube',
                                   '-o', os.path.join(log_dir, 'sonarqube.xml'),
                                   ] + gcov_exe_args)
            outfiles.append(('Sonarqube', pathlib.Path(log_dir, 'sonarqube.xml')))
        elif outputs:
            # Fixed copy-paste: this branch is about the Sonarqube report,
            # not the Xml one.
            print('gcovr >= 4.2 needed to generate Sonarqube coverage report')
            exitcode = 1

    if not outputs or 'text' in outputs:
        if gcovr_exe and mesonlib.version_compare(gcovr_version, '>=3.3'):
            subprocess.check_call(gcovr_base_cmd + gcovr_config +
                                  ['-o', os.path.join(log_dir, 'coverage.txt')
                                   ] + gcov_exe_args)
            outfiles.append(('Text', pathlib.Path(log_dir, 'coverage.txt')))
        elif outputs:
            print('gcovr >= 3.3 needed to generate text coverage report')
            exitcode = 1

    if not outputs or 'html' in outputs:
        if lcov_exe and genhtml_exe:
            htmloutdir = os.path.join(log_dir, 'coveragereport')
            covinfo = os.path.join(log_dir, 'coverage.info')
            initial_tracefile = covinfo + '.initial'
            run_tracefile = covinfo + '.run'
            raw_tracefile = covinfo + '.raw'

            if use_llvm_cov:
                # Create a shim to allow using llvm-cov as a gcov tool.
                if mesonlib.is_windows():
                    llvm_cov_shim_path = os.path.join(log_dir, 'llvm-cov.bat')
                    with open(llvm_cov_shim_path, 'w', encoding='utf-8') as llvm_cov_bat:
                        llvm_cov_bat.write(f'@"{llvm_cov_exe}" gcov %*')
                else:
                    llvm_cov_shim_path = os.path.join(log_dir, 'llvm-cov.sh')
                    with open(llvm_cov_shim_path, 'w', encoding='utf-8') as llvm_cov_sh:
                        llvm_cov_sh.write(f'#!/usr/bin/env sh\nexec "{llvm_cov_exe}" gcov $@')
                    os.chmod(llvm_cov_shim_path,
                             os.stat(llvm_cov_shim_path).st_mode | stat.S_IEXEC)
                gcov_tool_args = ['--gcov-tool', llvm_cov_shim_path]
            else:
                gcov_tool_args = []

            subprocess.check_call([lcov_exe,
                                   '--directory', build_root,
                                   '--capture',
                                   '--initial',
                                   '--output-file', initial_tracefile
                                   ] + lcov_config + gcov_tool_args)
            subprocess.check_call([lcov_exe,
                                   '--directory', build_root,
                                   '--capture',
                                   '--output-file', run_tracefile,
                                   '--no-checksum',
                                   '--rc', 'lcov_branch_coverage=1',
                                   ] + lcov_config + gcov_tool_args)
            # Join initial and test results.
            subprocess.check_call([lcov_exe,
                                   '-a', initial_tracefile,
                                   '-a', run_tracefile,
                                   '--rc', 'lcov_branch_coverage=1',
                                   '-o', raw_tracefile] + lcov_config)
            # Remove all directories outside the source_root from the covinfo
            subprocess.check_call([lcov_exe,
                                   '--extract', raw_tracefile,
                                   os.path.join(source_root, '*'),
                                   '--rc', 'lcov_branch_coverage=1',
                                   '--output-file', covinfo] + lcov_config)
            # Remove all directories inside subproject dir
            subprocess.check_call([lcov_exe,
                                   '--remove', covinfo,
                                   os.path.join(subproject_root, '*'),
                                   '--rc', 'lcov_branch_coverage=1',
                                   '--output-file', covinfo] + lcov_config)
            subprocess.check_call([genhtml_exe,
                                   '--prefix', build_root,
                                   '--prefix', source_root,
                                   '--output-directory', htmloutdir,
                                   '--title', 'Code coverage',
                                   '--legend',
                                   '--show-details',
                                   '--branch-coverage',
                                   covinfo])
            outfiles.append(('Html', pathlib.Path(htmloutdir, 'index.html')))
        elif gcovr_exe and mesonlib.version_compare(gcovr_version, '>=3.3'):
            htmloutdir = os.path.join(log_dir, 'coveragereport')
            if not os.path.isdir(htmloutdir):
                os.mkdir(htmloutdir)
            subprocess.check_call(gcovr_base_cmd + gcovr_config +
                                  ['--html',
                                   '--html-details',
                                   '--print-summary',
                                   '-o', os.path.join(htmloutdir, 'index.html'),
                                   ])
            outfiles.append(('Html', pathlib.Path(htmloutdir, 'index.html')))
        elif outputs:
            print('lcov/genhtml or gcovr >= 3.3 needed to generate Html coverage report')
            exitcode = 1

    if not outputs and not outfiles:
        print('Need gcovr or lcov/genhtml to generate any coverage reports')
        exitcode = 1

    if outfiles:
        print('')
        for (filetype, path) in outfiles:
            print(filetype + ' coverage report can be found at', path.as_uri())

    return exitcode