def _find_compiler():
    """Locate the default C/C++ compiler for the current platform.

    Returns a (compiler_name, version) tuple; either element may be None
    when no usable compiler could be detected.
    """
    if current_os() == 'windows':
        # Windows always uses MSVC; find_msvc() returns (path, version).
        found_name, found_version = 'msvc', Toolchain.find_msvc()[1]
    else:
        # First honor $CC, then whatever `cc` resolves to on PATH.
        for candidate in (util.where(os.environ.get('CC', None)),
                          util.where('cc')):
            if not candidate:
                continue
            name, ver = _compiler_version(candidate)
            if name and ver:
                return name, ver

        # Fall back to probing for clang, then gcc.
        clang_path, clang_version = Toolchain.find_llvm_tool('clang')
        gcc_path, gcc_version = Toolchain.find_gcc_tool('gcc')
        if clang_path:
            found_name, found_version = 'clang', clang_version
        elif gcc_path:
            found_name, found_version = 'gcc', gcc_version
        else:
            found_name = found_version = None
            print(
                'Neither GCC or Clang could be found on this system, perhaps not installed yet?'
            )

    if not found_name or not found_version:
        print('WARNING: Default compiler could not be found')

    print('Default Compiler: {} {}'.format(found_name, found_version))
    return found_name, found_version
def install(self, env):
    """Install the requested LLVM/clang toolchain via the llvm.sh script.

    No-op if this installer already ran, or if a matching compiler
    version is already present on the system.

    :param env: build environment providing shell, spec, config and toolchain.
    """
    if self.installed:
        return

    sh = env.shell

    # Skip the install entirely when the requested compiler already exists.
    installed_path, installed_version = Toolchain.find_compiler(
        env.spec.compiler, env.spec.compiler_version)
    if installed_path:
        print('Compiler {} {} already exists at {}'.format(
            env.spec.compiler, installed_version, installed_path))
        self.installed = True
        return

    sudo = env.config.get('sudo', current_os() == 'linux')
    sudo = ['sudo'] if sudo else []

    # BUG FIX: the original used str.replace('\..+', ''), which looks for
    # the literal substring '\..+' and therefore never stripped anything.
    # The intent is to reduce e.g. '11.0' to the major version '11' that
    # llvm.sh expects, which requires a regex substitution.
    version = re.sub(r'\..+', '', env.toolchain.compiler_version)

    # Write the bundled llvm.sh contents to a temp file we can execute.
    script = tempfile.NamedTemporaryFile(delete=False)
    script_path = script.name
    script.write(LLVM_SH.encode())
    script.close()

    # Make script executable
    os.chmod(
        script_path, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH | stat.S_IXUSR
        | stat.S_IXGRP | stat.S_IXOTH)

    sh.exec(*sudo, [script_path, version], check=True)
    self.installed = True
def validate_spec(build_spec):
    """Assert that every field of *build_spec* names a known, supported value.

    Checks host, target, arch, compiler and compiler_version against the
    module-level tables (HOSTS, TARGETS, ARCHS, COMPILERS), then verifies
    the chosen compiler supports this host and target.

    :raises AssertionError: with a descriptive message on the first invalid field.
    """
    assert build_spec.host in HOSTS, "Host name {} is invalid".format(
        build_spec.host)
    assert build_spec.target in TARGETS, "Target {} is invalid".format(
        build_spec.target)
    # BUG FIX: the message previously formatted build_spec.target, so an
    # invalid architecture reported the (valid) target name instead.
    assert build_spec.arch in ARCHS, "Architecture {} is invalid".format(
        build_spec.arch)
    assert build_spec.compiler in COMPILERS, "Compiler {} is invalid".format(
        build_spec.compiler)

    compiler = COMPILERS[build_spec.compiler]
    assert build_spec.compiler_version in compiler[
        'versions'], "Compiler version {} is invalid for compiler {}".format(
            build_spec.compiler_version, build_spec.compiler)

    # Either the spec's host or the actual running OS must be supported.
    supported_hosts = compiler['hosts']
    assert build_spec.host in supported_hosts or current_os(
    ) in supported_hosts, "Compiler {} does not support host {}".format(
        build_spec.compiler, build_spec.host)

    supported_targets = compiler['targets']
    assert build_spec.target in supported_targets, "Compiler {} does not support target {}".format(
        build_spec.compiler, build_spec.target)
def __init__(self, dryrun=False):
    """Initialize shell state.

    :param dryrun: when True, commands are simulated rather than executed.
    """
    self.dryrun = dryrun
    self.platform = current_os()
    # Used in dry-run builds to track simulated working directory
    self._cwd = os.getcwd()
    # pushd/popd stack
    self.dir_stack = []
    self.env_stack = []
def install_node_via_nvm(self, env):
    """Install the configured node version through nvm and verify it runs."""
    shell = env.shell

    # Install node
    shell.exec(self.nvm, 'install', self.version, check=True)

    # Fetch path to installed node, add to PATH
    if current_os() != 'windows':
        which = shell.exec(self.nvm, 'which', self.version, check=True)
        node_dir = os.path.dirname(which.output)
        new_path = '{}{}{}'.format(node_dir, os.pathsep, shell.getenv('PATH'))
        shell.setenv('PATH', new_path)

    shell.exec('node', '--version', check=True)
def export_compiler(compiler, env):
    """Export CC/CXX env vars pointing at the configured compiler binaries.

    Does nothing on windows or when the 'default' compiler is requested.
    """
    if current_os() == 'windows':
        return
    if compiler == 'default':
        return

    for config_key, env_var in (('c', 'CC'), ('cxx', 'CXX')):
        executable = env.config.get(config_key)
        if not executable:
            continue
        # Keep symlinks intact so e.g. 'cc' stays whatever it points at.
        resolved = env.shell.where(executable, resolve_symlinks=False)
        if resolved:
            env.shell.setenv(env_var, resolved)
        else:
            print('WARNING: Compiler {} could not be found'.format(executable))
def __init__(self, **kwargs):
    """Build a spec from a 'host-compiler-version-target-arch[-variant]' string
    and/or individual keyword fields; kwargs override the parsed spec string.
    """
    # Every slot starts as the sentinel 'default' so later resolution can
    # detect which fields were never specified.
    for slot in ('host', 'target', 'arch', 'compiler', 'compiler_version'):
        setattr(self, slot, 'default')
    self.downstream = False

    spec = kwargs.get('spec', None)
    if spec:
        if spec.startswith('default'):  # default or default(-{variant})
            # Only variant suffixes (if any) remain after 'default'.
            _, *rest = spec.split('-')
        elif not '-' in spec:  # just a variant
            rest = [spec]
        else:
            # Parse the spec from a single string; any extra trailing
            # components are treated as variant flags.
            self.host, self.compiler, self.compiler_version, self.target, self.arch, * \
                rest = spec.split('-')

        # Variant flags are explicitly set True/False so a spec string
        # without the flag resets it.
        for variant in ('downstream', ):
            if variant in rest:
                setattr(self, variant, True)
            else:
                setattr(self, variant, False)

    # Pull out individual fields. Note this is not in an else to support overriding at construction time
    for slot in ('host', 'target', 'arch', 'compiler', 'compiler_version',
                 'downstream'):
        if slot in kwargs and kwargs[slot]:
            setattr(self, slot, kwargs[slot])

    # Convert a target tuple into its component parts
    if '-' in self.target:
        self.target, self.arch = self.target.split('-')

    # Convert defaults to be based on running environment
    if self.host == 'default':
        self.host = current_host()
    if self.target == 'default':
        self.target = current_os()
    if self.arch == 'default':
        self.arch = current_arch()

    # Canonical name: host-compiler-version-target-arch[-downstream]
    self.name = '-'.join([
        self.host, self.compiler, self.compiler_version, self.target, self.arch
    ])
    if self.downstream:
        self.name += "-downstream"

    validate_spec(self)
def install(self, env):
    """Install nvm (platform-appropriate) and then node itself. Idempotent."""
    if self.installed:
        return

    sh = env.shell
    self.install_dir = os.path.join(env.deps_dir, self.name)
    sh.mkdir(self.install_dir)

    # nvm comes from chocolatey on windows, from the install script elsewhere.
    nvm_installer = (self.install_nvm_choco
                     if current_os() == 'windows' else self.install_nvm_sh)
    nvm_installer(env)

    self.install_node_via_nvm(env)
    self.installed = True
def all_compilers():
    """ Returns a list of tuples of all available (compiler, version) """
    found = []

    for ver in _gcc_versions():
        if Toolchain.find_gcc_tool('gcc', ver)[0]:
            found.append(('gcc', ver))

    for ver in _clang_versions():
        if Toolchain.find_llvm_tool('clang', ver)[0]:
            found.append(('clang', ver))

    # MSVC is only probed on windows.
    if current_os() == 'windows':
        for ver in _msvc_versions():
            if Toolchain.find_msvc(ver)[0]:
                found.append(('msvc', ver))

    return found
def install(self, env):
    """Set up cross-compilation by fetching the dockcross wrapper script
    for the target platform and wiring it into the toolchain env.
    """
    if self.installed:
        return

    sh = env.shell
    toolchain = env.toolchain

    # NOTE(review): sudo is computed here but never used in this method —
    # confirm whether it was meant to prefix the docker invocation.
    sudo = env.config.get('sudo', current_os() == 'linux')
    sudo = ['sudo'] if sudo else []

    print('Installing cross-compile via dockcross for {}'.format(
        toolchain.platform))

    # Allow config to override the dockcross image platform name.
    cross_compile_platform = env.config.get('cross_compile_platform',
                                            toolchain.platform)
    # Running the dockcross image with no args prints its wrapper script.
    result = sh.exec('docker',
                     'run',
                     'dockcross/{}'.format(cross_compile_platform),
                     quiet=True,
                     check=True)
    # Strip off any output from docker itself
    output, shebang, script = result.output.partition('#!')
    script = shebang + script
    print(output)
    assert result.returncode == 0

    # Persist the wrapper script into the build dir and make it executable.
    dockcross = os.path.abspath(
        os.path.join(env.build_dir,
                     'dockcross-{}'.format(cross_compile_platform)))
    Path(dockcross).touch(0o755)
    with open(dockcross, "w+t") as f:
        f.write(script)
    sh.exec('chmod', 'a+x', dockcross)

    # Write out build_dir/dockcross.env file to init the dockcross env with
    # other code can add to this
    dockcross_env = os.path.join(env.build_dir, 'dockcross.env')
    with open(dockcross_env, "w+") as f:
        f.write('#env for dockcross\n')
    toolchain.env_file = dockcross_env
    # Subsequent shell commands run through the dockcross wrapper with
    # this env file applied.
    toolchain.shell_env = [
        dockcross, '-a', '--env-file={}'.format(dockcross_env)
    ]

    self.installed = True
def _compiler_version(cc):
    """Run `cc --version` and parse out (compiler_name, version).

    Recognizes Apple clang, LLVM clang, and GCC output formats. Returns
    ('clang'|'gcc', version_string) on success, (None, None) on windows
    or when the output is not recognized.

    FIX: regex literals are now raw strings — '\\d' / '\\.' in plain
    strings are invalid escape sequences (DeprecationWarning, and a
    SyntaxError in future Python versions).
    """
    if current_os() != 'windows':
        result = util.run_command(cc, '--version', quiet=True, stderr=False)
        text = result.output
        # Apple clang
        m = re.match(r'Apple (LLVM|clang) version (\d+)', text)
        if m:
            return 'clang', m.group(2)
        # LLVM clang
        m = re.match(r'.*clang version (\d+)', text)
        if m:
            return 'clang', m.group(1)
        # GCC 4.x — major.minor is the significant version
        m = re.match(r'gcc .+ (4\.\d+)', text)
        if m:
            return 'gcc', m.group(1)
        # GCC 5+ — major version only
        m = re.match(r'gcc .+ (\d+)\.', text)
        if m:
            return 'gcc', m.group(1)
    return None, None
def produce_config(build_spec, project, overrides=None, **additional_variables):
    """ Traverse the configurations to produce one for the given spec """
    host_os = current_os()

    # Built-in tables form the base layer of the config search.
    defaults = {
        'hosts': HOSTS,
        'targets': TARGETS,
        'compilers': COMPILERS,
        'architectures': ARCHS,
    }

    # Build the list of config options to poll
    configs = UniqueList()

    # Processes a config object (could come from a file), searching for keys hosts, targets, and compilers
    def process_config(config, depth=0):
        # Append the sub-config for `instance` under `element_name` (if any)
        # to `configs`, and recurse into it for nested sections.
        def process_element(map, element_name, instance):
            if not map or not isinstance(map, dict):
                return

            element = map.get(element_name)
            # Some keys will just contain lists or scalars (e.g. hosts)
            if not element or not isinstance(element, dict):
                return

            new_config = element.get(instance)
            if not new_config:
                return

            configs.append(new_config)

            # recursively process config as long as sub-sections are found
            process_config(new_config, depth + 1)
            return new_config

        # Pull out any top level defaults
        if depth == 0:
            defaults = {}
            for key, value in config.items():
                if key not in ('hosts', 'targets', 'compilers',
                               'architectures'):
                    defaults[key] = value
            if len(defaults) > 0:
                configs.append(defaults)

        # pull out arch + any aliases
        archs = _arch_aliases(build_spec)
        for arch in archs:
            process_element(config, 'architectures', arch)

        # Get defaults from os (linux) then override with host (al2, manylinux, etc)
        if host_os != build_spec.host:
            process_element(config, 'hosts', host_os)
        process_element(config, 'hosts', build_spec.host)

        # pull out spec target to override
        process_element(config, 'targets', build_spec.target)

        # pull out spec compiler and version info
        compiler = process_element(config, 'compilers', build_spec.compiler)
        # Allow most specific resolves to come last
        process_element(compiler, 'versions', build_spec.compiler_version)

    # Process defaults first
    process_config(defaults)

    # process platform
    # target, arch -> platform
    target_platform = '{}-{}'.format(build_spec.target, build_spec.arch)
    configs.append(PLATFORMS[target_platform])

    # then override with config file
    project_config = project.config
    process_config(project_config)

    new_version = {
        'spec': build_spec,
    }

    # Iterate all keys and apply them. Later configs in the list win or
    # merge into earlier values depending on the prefix used.
    for key, default in KEYS.items():
        new_version[key] = default

        for config in configs:
            # '!key' replaces the value outright; '+key' force-merges.
            override_key = '!' + key
            apply_key = '+' + key
            if override_key in config:  # Force Override
                new_version[key] = config[override_key]
            elif apply_key in config:  # Force Apply
                _apply_value(new_version, key, config[apply_key])
            elif key in config:
                # Project configs override defaults unless force applied
                if key in project_config and config[key] == project_config[key]:
                    new_version[key] = config[key]
                else:
                    # By default, merge all values (except strings)
                    _apply_value(new_version, key, config[key])

    new_version = _coalesce_pkg_options(build_spec, new_version)

    # Caller-supplied overrides always win.
    if overrides:
        for key, val in overrides.items():
            new_version[key] = val

    # Default variables
    replacements = {
        'host': build_spec.host,
        'compiler': build_spec.compiler,
        'version': build_spec.compiler_version,
        'target': build_spec.target,
        'arch': build_spec.arch,
        'cwd': os.getcwd(),
        **additional_variables,
    }

    # Pull variables from the configs
    for config in configs:
        if 'variables' in config:
            variables = config['variables']
            assert type(variables) == dict

            # Copy into the variables list
            for k, v in variables.items():
                replacements[k] = v

    # Post process
    new_version = replace_variables(new_version, replacements)
    new_version['variables'] = replacements
    new_version['__processed'] = True

    return new_version
def parse_args():
    """Parse the builder CLI: hand-parses the leading command/spec and any
    KEY=VALUE pairs from argv, then lets argparse handle the flags.

    Returns (args, spec) where spec is a fully-resolved BuildSpec.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-d',
        '--dry-run',
        action='store_true',
        help="Don't run the build, just print the commands that would run")
    parser.add_argument('-p',
                        '--project',
                        action='store',
                        type=str,
                        help="Project to work on")
    parser.add_argument('--config',
                        type=str,
                        default='RelWithDebInfo',
                        help='The native code configuration to build with')
    parser.add_argument('--dump-config',
                        action='store_true',
                        help="Print the config in use before running a build")
    parser.add_argument('--spec', type=str)
    parser.add_argument('--build-dir',
                        type=str,
                        help='Directory to work in',
                        default='.')
    parser.add_argument('-b', '--branch', help='Branch to build from')
    parser.add_argument('--cli_config', action='append', type=list)
    parser.add_argument('--compiler',
                        type=str,
                        help="The compiler to use for this build")
    parser.add_argument(
        '--target',
        type=str,
        help=
        "The target to cross-compile for (e.g. android-armv7, linux-x86, linux-aarch64)",
        default='{}-{}'.format(current_os(), current_arch()),
        choices=data.PLATFORMS.keys())
    parser.add_argument('args', nargs=argparse.REMAINDER)

    # hand parse command and spec from within the args given
    command = None
    spec = None
    argv = sys.argv[1:]

    # eat command and optionally spec
    if argv and not argv[0].startswith('-'):
        command = argv.pop(0)
        if len(argv) >= 1 and not argv[0].startswith('-'):
            spec = argv.pop(0)

    if not command:
        print('No command provided, should be [build|inspect|<action-name>]')
        sys.exit(1)

    # pull out any k=v pairs
    config_vars = []
    for arg in argv:
        m = re.match(r'^([A-Za-z_0-9]+)=(.+)', arg)
        if m:
            config_vars.append((m.group(1), m.group(2)))

    cli_config = {}
    for var in config_vars:
        cli_config[var[0]] = coerce_arg(var[1])
        # Remove the pair so argparse never sees it.
        argv.remove('{}={}'.format(var[0], var[1]))

    # parse the args we know, put the rest in args.args for others to parse
    args, extras = parser.parse_known_args(argv)
    args.command = command
    args.cli_config = cli_config
    # --spec flag wins over the positional spec.
    args.spec = args.spec if args.spec else spec

    # Backwards compat for `builder run $action`
    if args.command == 'run':
        args.command = args.spec
        args.spec = None

    # normalize target
    if args.target:
        args.target = normalize_target(args.target)

    if args.spec:
        spec = BuildSpec(spec=args.spec, target=args.target)

    # --compiler/--target overrides are folded into a rebuilt spec.
    if args.compiler or args.target:
        compiler, version = ('default', 'default')
        if args.compiler:
            compiler, version = args.compiler.split('-')
        spec = str(spec) if spec else None
        spec = BuildSpec(compiler=compiler,
                         compiler_version=version,
                         target=args.target,
                         spec=spec)

    if not spec:
        spec = default_spec()

    # Save unknown args for actions to parse later
    args.args += extras

    return args, spec
spec = default_spec() # Save unknown args for actions to parse later args.args += extras return args, spec if __name__ == '__main__': args, spec = parse_args() if args.build_dir != '.': if not os.path.isdir(args.build_dir): os.makedirs(args.build_dir) os.chdir(args.build_dir) if spec.target == current_os() and spec.arch == current_arch(): inspect_host(spec) if args.command == 'inspect': sys.exit(0) # set up environment env = Env({ 'dryrun': args.dry_run, 'args': args, 'project': args.project, 'branch': args.branch, 'spec': spec, }) Scripts.load()
def _is_cross_compile(os, arch):
    """Return True when (os, arch) differs from the machine we run on.

    NOTE(review): the `os` parameter shadows the `os` module within this
    function body; kept as-is to preserve the call interface.
    """
    native = (os == current_os() and arch == current_arch())
    return not native
def run(self, env):
    """Install configured system packages and build the 'setup' Script of
    any remaining setup steps.

    Honors --skip-install by flipping the shell into dryrun mode so the
    commands are printed but not executed. Package-manager init/update
    runs at most once per process (InstallPackages.pkg_init_done latch).
    """
    config = env.config
    sh = env.shell

    parser = argparse.ArgumentParser()
    parser.add_argument('--skip-install', action='store_true')
    args = parser.parse_known_args(env.args.args)[0]

    sudo = config.get('sudo', current_os() == 'linux')
    sudo = ['sudo'] if sudo else []

    # Instance-level packages take precedence over config packages.
    packages = self.packages if self.packages else config.get(
        'packages', [])
    if packages:
        packages = UniqueList(packages)
        pkg_tool = package_tool()
        print('Installing packages via {}: {}'.format(
            pkg_tool.value, ', '.join(packages)))

        # With --skip-install, simulate the package commands via dryrun
        # and restore the previous dryrun state afterwards.
        was_dryrun = sh.dryrun
        if args.skip_install:
            sh.dryrun = True

        if not InstallPackages.pkg_init_done:
            # One-time package manager setup commands (strings or lists).
            pkg_setup = UniqueList(config.get('pkg_setup', []))
            if pkg_setup:
                for cmd in pkg_setup:
                    if isinstance(cmd, str):
                        cmd = cmd.split(' ')
                    assert isinstance(cmd, list)
                    sh.exec(*sudo, cmd, check=True, retries=3)

            # One-time package index refresh (e.g. apt-get update).
            pkg_update = config.get('pkg_update', None)
            if pkg_update:
                if not isinstance(pkg_update, list):
                    pkg_update = pkg_update.split(' ')
                sh.exec(*sudo, pkg_update, check=True, retries=3)

            InstallPackages.pkg_init_done = True

        # The install command plus the package names, as one invocation.
        pkg_install = config['pkg_install']
        if not isinstance(pkg_install, list):
            pkg_install = pkg_install.split(' ')
        pkg_install += packages

        sh.exec(*sudo, pkg_install, check=True, retries=3)

        if args.skip_install:
            sh.dryrun = was_dryrun

    setup_steps = env.config.get('setup_steps', [])
    if setup_steps:
        steps = []
        for step in setup_steps:
            if not isinstance(step, list):
                step = step.split(' ')
            if step:
                steps.append([*sudo, *step])
        # Under --skip-install, wrap the steps so they run in dryrun mode
        # and the prior dryrun setting is restored at the end.
        if args.skip_install:
            return Script([
                partial(set_dryrun, True), *steps,
                partial(set_dryrun, sh.dryrun)
            ],
                          name='setup')
        return Script(steps, name='setup')