def EnsureCompiled(fname):
    """Compile an fdt .dts source file into a .dtb binary blob if needed.

    The source is first run through the C preprocessor ('cc -E') so that
    #include and macros work, then compiled with dtc.

    Args:
        fname: Filename (if .dts it will be compiled). If not, it will be
            left alone

    Returns:
        Filename of resulting .dtb file
    """
    _, ext = os.path.splitext(fname)
    if ext != '.dts':
        return fname

    dts_input = tools.GetOutputFilename('source.dts')
    dtb_output = tools.GetOutputFilename('source.dtb')
    search_paths = [os.path.join(os.getcwd(), 'include')]

    # Preprocess the source so dtc only sees plain device-tree text
    args = ['-E', '-P', '-x', 'assembler-with-cpp', '-D__ASSEMBLY__']
    args += ['-Ulinux']
    for path in search_paths:
        args.extend(['-I', path])
    args += ['-o', dts_input, fname]
    command.Run('cc', *args)

    # If we don't have a directory, put it in the tools tempdir
    search_list = []
    for path in search_paths:
        search_list.extend(['-i', path])
    args = ['-I', 'dts', '-o', dtb_output, '-O', 'dtb']
    args.extend(search_list)
    args.append(dts_input)
    command.Run('dtc', *args)
    return dtb_output
def get_argparser():
    """Build the top-level argument parser for mocker.

    Registers every mocker sub-command on a subparser group and installs a
    default command that prints help when no sub-command was given.

    Returns:
        Configured argparse.ArgumentParser
    """
    parser = argparse.ArgumentParser(
        description='Docker Python implementation')
    subparsers = parser.add_subparsers(title="mocker_commands")

    # With no sub-command on the command line, fall back to a Help command
    # that explains the problem before printing usage
    no_command_message = \
        "mocker.py: error: no mocker_command given, printing help\n"
    parser.set_defaults(
        mocker_command=command.Help(parser, no_command_message))

    for cmd in (command.Initialise(),
                command.Pull(),
                command.RemoveImage(),
                command.Images(),
                command.Processes(),
                command.Run(),
                command.Execute(),
                command.Logs(),
                command.RemoveContainer(),
                command.Commit(),
                command.Help(parser),
                command.Clean()):
        cmd.add_parser_to(subparsers)
    return parser
def Run(name, *args, **kwargs):
    """Run a tool with some arguments

    This runs a 'tool', which is a program used by binman to process files
    and perhaps produce some output. Tools can be located on the PATH or in
    a search path.

    Args:
        name: Command name to run
        args: Arguments to the tool
        kwargs: Options to pass to command.run()

    Returns:
        CommandResult object

    Raises:
        ValueError: the tool failed and could not be found on the search
            path (with a hint about which package provides it)
    """
    env = None
    try:
        if tool_search_paths:
            env = dict(os.environ)
            env['PATH'] = ':'.join(tool_search_paths) + ':' + env['PATH']
        return command.Run(name, *args, capture=True,
                           capture_stderr=True, env=env, **kwargs)
    # Catch Exception rather than everything: a bare 'except:' would also
    # swallow KeyboardInterrupt / SystemExit
    except Exception:
        if env and not PathHasFile(env['PATH'], name):
            msg = "Please install tool '%s'" % name
            package = packages.get(name)
            if package:
                msg += " (e.g. from package '%s')" % package
            raise ValueError(msg)
        raise
def EnsureCompiled(fname, tmpdir=None, capture_stderr=False):
    """Compile an fdt .dts source file into a .dtb binary blob if needed.

    Args:
        fname: Filename (if .dts it will be compiled). If not, it will be
            left alone
        tmpdir: Temporary directory for output files, or None to use the
            tools-module output directory
        capture_stderr: True to capture dtc's stderr output

    Returns:
        Filename of resulting .dtb file
    """
    _, ext = os.path.splitext(fname)
    if ext != '.dts':
        return fname

    if tmpdir:
        dts_input = os.path.join(tmpdir, 'source.dts')
        dtb_output = os.path.join(tmpdir, 'source.dtb')
    else:
        dts_input = tools.GetOutputFilename('source.dts')
        dtb_output = tools.GetOutputFilename('source.dtb')
    search_paths = [os.path.join(os.getcwd(), 'include')]

    # Preprocess the source so dtc only sees plain device-tree text
    args = ['-E', '-P', '-x', 'assembler-with-cpp', '-D__ASSEMBLY__']
    args += ['-Ulinux']
    for path in search_paths:
        args.extend(['-I', path])
    args += ['-o', dts_input, fname]
    command.Run('cc', *args)

    # If we don't have a directory, put it in the tools tempdir
    search_list = []
    for path in search_paths:
        search_list.extend(['-i', path])
    args = ['-I', 'dts', '-o', dtb_output, '-O', 'dtb',
            '-W', 'no-unit_address_vs_reg']
    args.extend(search_list)
    args.append(dts_input)
    # Allow the dtc binary to be overridden via the DTC environment variable
    dtc = os.environ.get('DTC') or 'dtc'
    command.Run(dtc, *args, capture_stderr=capture_stderr)
    return dtb_output
def Run(name, *args):
    """Run a tool in the output directory, with a helpful install hint.

    Args:
        name: Command name to run
        args: Arguments to the tool

    Returns:
        CommandResult object

    Raises:
        ValueError: the tool failed and is not present on the PATH (with a
            hint about which package provides it)
    """
    try:
        return command.Run(name, *args, cwd=outdir, capture=True)
    # Catch Exception rather than everything so KeyboardInterrupt and
    # SystemExit still propagate normally
    except Exception:
        if not PathHasFile(name):
            # Typo fix: was "Plesae install tool"
            msg = "Please install tool '%s'" % name
            package = packages.get(name)
            if package:
                msg += " (e.g. from package '%s')" % package
            raise ValueError(msg)
        raise
def GenerateISODisk(uri, hostname, diskPool, sshPrivateKeyfile,
                    sshPublicKeyfile):
    """Build a cloud-init ISO for a host and upload it as a libvirt volume.

    Writes meta-data, network-config and user-data files to a per-host
    directory under /tmp, packs them into an ISO with mkisofs (volume id
    'cidata'), then deletes and re-uploads the cloud-init volume in the
    given pool.

    Args:
        uri: libvirt connection URI
        hostname: Host name; also names the temp directory and the volume
        diskPool: Name of the libvirt storage pool to upload into
        sshPrivateKeyfile: SSH private key file passed to generateUserdata()
        sshPublicKeyfile: SSH public key file passed to generateUserdata()

    Returns:
        Name of the uploaded cloud-init volume
    """
    metadata = generateMetadata(hostname)
    userdata = generateUserdata(hostname, sshPrivateKeyfile, sshPublicKeyfile)
    netdata = generateNetworkData()

    tmpDir = path.join("/tmp", hostname)
    os.makedirs(tmpDir, exist_ok=True)

    # Use context managers so files are closed even if yaml.dump raises.
    # Bug fix: the '#cloud-config' header must end with a newline - without
    # it the first YAML key was appended to the comment line and lost.
    mPath = path.join(tmpDir, "meta-data")
    with open(mPath, "w") as f:
        f.write("#cloud-config\n")
        f.write(yaml.dump(metadata, default_flow_style=False))

    # these configs don't seem to work in the `user-data` file, so use
    # `network-config`
    nPath = path.join(tmpDir, "network-config")
    with open(nPath, "w") as f:
        f.write("#cloud-config\n")
        f.write(yaml.dump(netdata, default_flow_style=False))

    uPath = path.join(tmpDir, "user-data")
    with open(uPath, "w") as f:
        f.write("#cloud-config\n\n")
        f.write(yaml.dump(userdata, default_flow_style=False))

    ciFile = path.join(tmpDir, "cloud-init.iso")
    command.Run([
        "mkisofs",
        "-output", ciFile,
        "-volid", "cidata",
        "-joliet",
        "-rock",
        mPath,
        uPath,
        nPath,
    ])

    ciVol = CloudInitDiskName(hostname)
    # Delete and recreate in case cloud init has been modified
    virsh.DeleteVolume(uri, diskPool, ciVol)
    # 5242800 is the volume capacity in bytes (~5 MiB) - TODO confirm units
    virsh.UploadVolume(uri, diskPool, ciVol, 5242800, "raw", ciFile)
    return ciVol
def RunBinman(options, args): """Main entry point to binman once arguments are parsed Args: options: Command-line options args: Non-option arguments """ ret_code = 0 # For testing: This enables full exception traces. #options.debug = True if not options.debug: sys.tracebacklimit = 0 if options.test: RunTests() elif options.test_coverage: RunTestCoverage() elif options.full_help: pager = os.getenv('PAGER') if not pager: pager = 'more' fname = os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])), 'README') command.Run(pager, fname) else: try: ret_code = control.Binman(options, args) except Exception as e: print 'binman: %s' % e if options.debug: print traceback.print_exc() ret_code = 1 return ret_code
help='Show boards with unknown build result') parser.add_option( '-o', '--output-dir', type='string', dest='output_dir', default='..', help= 'Directory where all builds happen and buildman has its workspace (default is ../)' ) parser.usage = """buildman -b <branch> [options] Build U-Boot for all commits in a branch. Use -n to do a dry run""" (options, args) = parser.parse_args() # Run our meagre tests if options.test: RunTests() elif options.full_help: pager = os.getenv('PAGER') if not pager: pager = 'more' fname = os.path.join(os.path.dirname(sys.argv[0]), 'README') command.Run(pager, fname) # Build selected commits for selected boards else: control.DoBuildman(options, args)
def DoBuildman(options, args, toolchains=None, make_func=None, boards=None,
               clean_dir=False):
    """The main control code for buildman

    Args:
        options: Command line options object
        args: Command line arguments (list of strings)
        toolchains: Toolchains to use - this should be a Toolchains()
                object. If None, then it will be created and scanned
        make_func: Make function to use for the builder. This is called
                to execute 'make'. If this is None, the normal function
                will be used, which calls the 'make' tool with suitable
                arguments. This setting is useful for tests.
        boards: Boards() object to use, containing a list of available
                boards. If this is None it will be created and scanned.
        clean_dir: Remove the output directory first, if it exists
    """
    global builder

    # -H: show the full README through the user's pager and stop
    if options.full_help:
        pager = os.getenv('PAGER')
        if not pager:
            pager = 'more'
        fname = os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])),
                             'README')
        command.Run(pager, fname)
        return 0

    gitutil.Setup()
    col = terminal.Color()

    options.git_dir = os.path.join(options.git, '.git')

    # Only create and scan a Toolchains object if the caller did not supply
    # one (tests pass their own)
    no_toolchains = toolchains is None
    if no_toolchains:
        toolchains = toolchain.Toolchains(options.override_toolchain)

    # --fetch-arch: list available toolchains or download some, then stop
    if options.fetch_arch:
        if options.fetch_arch == 'list':
            sorted_list = toolchains.ListArchs()
            print col.Color(col.BLUE, 'Available architectures: %s\n' %
                            ' '.join(sorted_list))
            return 0
        else:
            fetch_arch = options.fetch_arch
            if fetch_arch == 'all':
                fetch_arch = ','.join(toolchains.ListArchs())
            print col.Color(col.CYAN, '\nDownloading toolchains: %s' %
                            fetch_arch)
            for arch in fetch_arch.split(','):
                print
                ret = toolchains.FetchAndInstall(arch)
                if ret:
                    return ret
            return 0

    if no_toolchains:
        toolchains.GetSettings()
        toolchains.Scan(options.list_tool_chains and options.verbose)
    if options.list_tool_chains:
        toolchains.List()
        print
        return 0

    # Work out how many commits to build. We want to build everything on the
    # branch. We also build the upstream commit as a control so we can see
    # problems introduced by the first commit on the branch.
    count = options.count
    has_range = options.branch and '..' in options.branch
    if count == -1:
        if not options.branch:
            count = 1
        else:
            if has_range:
                count, msg = gitutil.CountCommitsInRange(options.git_dir,
                                                         options.branch)
            else:
                count, msg = gitutil.CountCommitsInBranch(options.git_dir,
                                                          options.branch)
            if count is None:
                sys.exit(col.Color(col.RED, msg))
            elif count == 0:
                sys.exit(col.Color(col.RED,
                                   "Range '%s' has no commits" %
                                   options.branch))
            if msg:
                print col.Color(col.YELLOW, msg)
            count += 1   # Build upstream commit also

    if not count:
        # NOTE(review): 'str' shadows the builtin here; harmless at this
        # point but worth renaming in a follow-up
        str = ("No commits found to process in branch '%s': "
               "set branch's upstream or use -c flag" % options.branch)
        sys.exit(col.Color(col.RED, str))

    # Work out what subset of the boards we are building
    if not boards:
        # Regenerate boards.cfg from the source tree before reading it
        board_file = os.path.join(options.output_dir, 'boards.cfg')
        genboardscfg = os.path.join(options.git, 'tools/genboardscfg.py')
        status = subprocess.call([genboardscfg, '-o', board_file])
        if status != 0:
            sys.exit("Failed to generate boards.cfg")

        boards = board.Boards()
        boards.ReadBoards(board_file)

    # -x takes comma-separated lists and may be given multiple times
    exclude = []
    if options.exclude:
        for arg in options.exclude:
            exclude += arg.split(',')

    # --boards likewise accumulates comma-separated board names
    if options.boards:
        requested_boards = []
        for b in options.boards:
            requested_boards += b.split(',')
    else:
        requested_boards = None
    why_selected, board_warnings = boards.SelectBoards(args, exclude,
                                                      requested_boards)
    selected = boards.GetSelected()
    if not len(selected):
        sys.exit(col.Color(col.RED, 'No matching boards found'))

    # Read the metadata from the commits. First look at the upstream commit,
    # then the ones in the branch. We would like to do something like
    # upstream/master~..branch but that isn't possible if upstream/master is
    # a merge commit (it will list all the commits that form part of the
    # merge)
    # Conflicting tags are not a problem for buildman, since it does not use
    # them. For example, Series-version is not useful for buildman. On the
    # other hand conflicting tags will cause an error. So allow later tags
    # to overwrite earlier ones by setting allow_overwrite=True
    if options.branch:
        if count == -1:
            if has_range:
                range_expr = options.branch
            else:
                range_expr = gitutil.GetRangeInBranch(options.git_dir,
                                                      options.branch)
            upstream_commit = gitutil.GetUpstream(options.git_dir,
                                                  options.branch)
            series = patchstream.GetMetaDataForList(upstream_commit,
                options.git_dir, 1, series=None, allow_overwrite=True)

            series = patchstream.GetMetaDataForList(range_expr,
                options.git_dir, None, series, allow_overwrite=True)
        else:
            # Honour the count
            series = patchstream.GetMetaDataForList(options.branch,
                options.git_dir, count, series=None, allow_overwrite=True)
    else:
        series = None
        # With no branch we build the current tree; make a normal (not -s)
        # run verbose and show errors unless a summary was requested
        if not options.dry_run:
            options.verbose = True
            if not options.summary:
                options.show_errors = True

    # By default we have one thread per CPU. But if there are not enough jobs
    # we can have fewer threads and use a high '-j' value for make.
    if not options.threads:
        options.threads = min(multiprocessing.cpu_count(), len(selected))
    if not options.jobs:
        options.jobs = max(1, (multiprocessing.cpu_count() +
                len(selected) - 1) / len(selected))

    if not options.step:
        options.step = len(series.commits) - 1

    # Locate GNU make - it may not be called 'make' on every system
    gnu_make = command.Output(os.path.join(options.git,
            'scripts/show-gnu-make'), raise_on_error=False).rstrip()
    if not gnu_make:
        sys.exit('GNU Make not found')

    # Create a new builder with the selected options.
    output_dir = options.output_dir
    if options.branch:
        dirname = options.branch.replace('/', '_')
        # As a special case allow the board directory to be placed in the
        # output directory itself rather than any subdirectory.
        if not options.no_subdirs:
            output_dir = os.path.join(options.output_dir, dirname)
    if clean_dir and os.path.exists(output_dir):
        shutil.rmtree(output_dir)
    CheckOutputDir(output_dir)
    builder = Builder(toolchains, output_dir, options.git_dir,
            options.threads, options.jobs, gnu_make=gnu_make,
            checkout=True, show_unknown=options.show_unknown,
            step=options.step, no_subdirs=options.no_subdirs,
            full_path=options.full_path,
            verbose_build=options.verbose_build,
            incremental=options.incremental,
            per_board_out_dir=options.per_board_out_dir,
            config_only=options.config_only,
            squash_config_y=not options.preserve_config_y,
            warnings_as_errors=options.warnings_as_errors)
    builder.force_config_on_failure = not options.quick
    if make_func:
        builder.do_make = make_func

    # For a dry run, just show our actions as a sanity check
    if options.dry_run:
        ShowActions(series, why_selected, selected, builder, options,
                    board_warnings)
    else:
        builder.force_build = options.force_build
        builder.force_build_failures = options.force_build_failures
        builder.force_reconfig = options.force_reconfig
        builder.in_tree = options.in_tree

        # Work out which boards to build
        board_selected = boards.GetSelectedDict()

        if series:
            commits = series.commits
            # Number the commits for test purposes
            for commit in range(len(commits)):
                commits[commit].sequence = commit
        else:
            commits = None

        Print(GetActionSummary(options.summary, commits, board_selected,
                               options))

        # We can't show function sizes without board details at present
        if options.show_bloat:
            options.show_detail = True

        builder.SetDisplayOptions(options.show_errors, options.show_sizes,
                                  options.show_detail, options.show_bloat,
                                  options.list_error_boards,
                                  options.show_config,
                                  options.show_environment)
        if options.summary:
            builder.ShowSummary(commits, board_selected)
        else:
            # 128/129 are distinct exit codes for build failure vs warnings
            fail, warned = builder.BuildBoards(commits, board_selected,
                    options.keep_outputs, options.verbose)
            if fail:
                return 128
            elif warned:
                return 129
    return 0
def Binman(options, args):
    """The main control code for binman

    This assumes that help and test options have already been dealt with. It
    deals with the core task of building images.

    Args:
        options: Command line options object
        args: Command line arguments (list of strings)

    Returns:
        0 on success

    Raises:
        ValueError: no board given, or the device tree has no binman node
    """
    global images

    if options.full_help:
        # Show the README through the user's pager, defaulting to 'more'
        pager = os.getenv('PAGER')
        if not pager:
            pager = 'more'
        fname = os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])),
                             'README')
        command.Run(pager, fname)
        return 0

    # Try to figure out which device tree contains our image description
    if options.dt:
        dtb_fname = options.dt
    else:
        board = options.board
        if not board:
            raise ValueError(
                'Must provide a board to process (use -b <board>)')
        board_pathname = os.path.join(options.build_dir, board)
        dtb_fname = os.path.join(board_pathname, 'u-boot.dtb')
        if not options.indir:
            options.indir = ['.']
        options.indir.append(board_pathname)

    try:
        # Import these here in case libfdt.py is not available, in which case
        # the above help option still works.
        import fdt
        import fdt_util

        tout.Init(options.verbosity)
        elf.debug = options.debug
        try:
            tools.SetInputDirs(options.indir)
            tools.PrepareOutputDir(options.outdir, options.preserve)
            SetEntryArgs(options.entry_arg)

            # Get the device tree ready by compiling it and copying the
            # compiled output into a file in our output directly. Then scan
            # it for use in binman.
            dtb_fname = fdt_util.EnsureCompiled(dtb_fname)
            fname = tools.GetOutputFilename('u-boot-out.dtb')
            # Bug fix: the .dtb is binary data, so it must be read in binary
            # mode to match the 'wb' write (text mode corrupts it on Windows
            # and fails to decode under Python 3)
            with open(dtb_fname, 'rb') as infd:
                with open(fname, 'wb') as outfd:
                    outfd.write(infd.read())
            dtb = fdt.FdtScan(fname)

            # Note the file so that GetFdt() can find it
            fdt_files['u-boot.dtb'] = dtb
            node = _FindBinmanNode(dtb)
            if not node:
                raise ValueError("Device tree '%s' does not have a 'binman' "
                                 "node" % dtb_fname)

            images = _ReadImageDesc(node)

            # Prepare the device tree by making sure that any missing
            # properties are added (e.g. 'pos' and 'size'). The values of
            # these may not be correct yet, but we add placeholders so that
            # the size of the device tree is correct. Later, in
            # SetCalculatedProperties() we will insert the correct values
            # without changing the device-tree size, thus ensuring that our
            # entry offsets remain the same.
            for image in images.values():
                if options.update_fdt:
                    image.AddMissingProperties()
                image.ProcessFdt(dtb)

            dtb.Pack()
            dtb.Flush()

            for image in images.values():
                # Perform all steps for this image, including checking and
                # writing it. This means that errors found with a later
                # image will be reported after earlier images are already
                # completed and written, but that does not seem important.
                image.GetEntryContents()
                image.GetEntryOffsets()
                image.PackEntries()
                image.CheckSize()
                image.CheckEntries()
                image.SetImagePos()
                if options.update_fdt:
                    image.SetCalculatedProperties()
                image.ProcessEntryContents()
                image.WriteSymbols()
                image.BuildImage()
                if options.map:
                    image.WriteMap()
            # Write back the (possibly updated) device tree
            with open(fname, 'wb') as outfd:
                outfd.write(dtb.GetContents())
        finally:
            tools.FinaliseOutputDir()
    finally:
        tout.Uninit()

    return 0
def Binman(args):
    """The main control code for binman

    This assumes that help and test options have already been dealt with. It
    deals with the core task of building images.

    Args:
        args: Command line arguments Namespace object

    Returns:
        0 on success
    """
    if args.full_help:
        # Show the README through the user's pager, defaulting to 'more'
        pager = os.getenv('PAGER')
        if not pager:
            pager = 'more'
        fname = os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])),
                             'README')
        command.Run(pager, fname)
        return 0

    # 'ls' sub-command: list the entries in an existing image
    if args.cmd == 'ls':
        try:
            tools.PrepareOutputDir(None)
            ListEntries(args.image, args.paths)
        finally:
            tools.FinaliseOutputDir()
        return 0

    # 'extract' sub-command: pull entry data out of an existing image
    if args.cmd == 'extract':
        try:
            tools.PrepareOutputDir(None)
            ExtractEntries(args.image, args.filename, args.outdir, args.paths,
                           not args.uncompressed)
        finally:
            tools.FinaliseOutputDir()
        return 0

    # 'replace' sub-command: write new data into entries of an existing image
    if args.cmd == 'replace':
        try:
            tools.PrepareOutputDir(None)
            ReplaceEntries(args.image, args.filename, args.indir, args.paths,
                           do_compress=not args.compressed,
                           allow_resize=not args.fix_size,
                           write_map=args.map)
        finally:
            tools.FinaliseOutputDir()
        return 0

    # Try to figure out which device tree contains our image description
    if args.dt:
        dtb_fname = args.dt
    else:
        board = args.board
        if not board:
            raise ValueError(
                'Must provide a board to process (use -b <board>)')
        board_pathname = os.path.join(args.build_dir, board)
        dtb_fname = os.path.join(board_pathname, 'u-boot.dtb')
        if not args.indir:
            args.indir = ['.']
        args.indir.append(board_pathname)

    try:
        tout.Init(args.verbosity)
        elf.debug = args.debug
        cbfs_util.VERBOSE = args.verbosity > 2
        state.use_fake_dtb = args.fake_dtb
        try:
            tools.SetInputDirs(args.indir)
            tools.PrepareOutputDir(args.outdir, args.preserve)
            tools.SetToolPaths(args.toolpath)
            state.SetEntryArgs(args.entry_arg)

            # Compile/scan the device tree(s) and build each selected image
            images = PrepareImagesAndDtbs(dtb_fname, args.image,
                                          args.update_fdt)
            for image in images.values():
                ProcessImage(image, args.update_fdt, args.map)

            # Write the updated FDTs to our output files
            for dtb_item in state.GetAllFdts():
                tools.WriteFile(dtb_item._fname, dtb_item.GetContents())
        finally:
            tools.FinaliseOutputDir()
    finally:
        tout.Uninit()

    return 0
def Binman(options, args):
    """The main control code for binman

    This assumes that help and test options have already been dealt with. It
    deals with the core task of building images.

    Args:
        options: Command line options object
        args: Command line arguments (list of strings)
    """
    global images

    if options.full_help:
        # Show the README via the user's pager, defaulting to 'more'
        pager = os.getenv('PAGER') or 'more'
        readme = os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])),
                              'README')
        command.Run(pager, readme)
        return 0

    # Try to figure out which device tree contains our image description
    if options.dt:
        dtb_fname = options.dt
    else:
        board = options.board
        if not board:
            raise ValueError(
                'Must provide a board to process (use -b <board>)')
        board_pathname = os.path.join(options.build_dir, board)
        dtb_fname = os.path.join(board_pathname, 'u-boot.dtb')
        if not options.indir:
            options.indir = ['.']
        options.indir.append(board_pathname)

    try:
        tout.Init(options.verbosity)
        try:
            tools.SetInputDirs(options.indir)
            tools.PrepareOutputDir(options.outdir, options.preserve)

            dtb = fdt_select.FdtScan(dtb_fname)
            node = _FindBinmanNode(dtb)
            if not node:
                raise ValueError("Device tree '%s' does not have a 'binman' "
                                 "node" % dtb_fname)
            images = _ReadImageDesc(node)

            for image in images.values():
                # Perform all steps for this image, including checking and
                # writing it. This means that errors found with a later
                # image will be reported after earlier images are already
                # completed and written, but that does not seem important.
                image.GetEntryContents()
                image.GetEntryPositions()
                image.PackEntries()
                image.CheckSize()
                image.CheckEntries()
                image.ProcessEntryContents()
                image.BuildImage()
        finally:
            tools.FinaliseOutputDir()
    finally:
        tout.Uninit()

    return 0
def Binman(args):
    """The main control code for binman

    This assumes that help and test options have already been dealt with. It
    deals with the core task of building images.

    Args:
        args: Command line arguments Namespace object

    Returns:
        0 on success
    """
    global images

    if args.full_help:
        # Show the README through the user's pager, defaulting to 'more'
        pager = os.getenv('PAGER')
        if not pager:
            pager = 'more'
        fname = os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])),
                             'README')
        command.Run(pager, fname)
        return 0

    # 'ls' sub-command: list the entries in an existing image
    if args.cmd == 'ls':
        ListEntries(args.image, args.paths)
        return 0

    # 'extract' sub-command: pull entry data out of an existing image
    if args.cmd == 'extract':
        try:
            tools.PrepareOutputDir(None)
            ExtractEntries(args.image, args.filename, args.outdir, args.paths,
                           not args.uncompressed)
        finally:
            tools.FinaliseOutputDir()
        return 0

    # Try to figure out which device tree contains our image description
    if args.dt:
        dtb_fname = args.dt
    else:
        board = args.board
        if not board:
            raise ValueError(
                'Must provide a board to process (use -b <board>)')
        board_pathname = os.path.join(args.build_dir, board)
        dtb_fname = os.path.join(board_pathname, 'u-boot.dtb')
        if not args.indir:
            args.indir = ['.']
        args.indir.append(board_pathname)

    try:
        # Import these here in case libfdt.py is not available, in which case
        # the above help option still works.
        import fdt
        import fdt_util

        tout.Init(args.verbosity)
        elf.debug = args.debug
        cbfs_util.VERBOSE = args.verbosity > 2
        state.use_fake_dtb = args.fake_dtb
        try:
            tools.SetInputDirs(args.indir)
            tools.PrepareOutputDir(args.outdir, args.preserve)
            tools.SetToolPaths(args.toolpath)
            state.SetEntryArgs(args.entry_arg)

            # Get the device tree ready by compiling it and copying the
            # compiled output into a file in our output directly. Then scan
            # it for use in binman.
            dtb_fname = fdt_util.EnsureCompiled(dtb_fname)
            fname = tools.GetOutputFilename('u-boot.dtb.out')
            tools.WriteFile(fname, tools.ReadFile(dtb_fname))
            dtb = fdt.FdtScan(fname)

            node = _FindBinmanNode(dtb)
            if not node:
                raise ValueError("Device tree '%s' does not have a 'binman' "
                                 "node" % dtb_fname)

            images = _ReadImageDesc(node)

            # -i: restrict processing to the named images, reporting any
            # that were skipped when verbose enough
            if args.image:
                skip = []
                new_images = OrderedDict()
                for name, image in images.items():
                    if name in args.image:
                        new_images[name] = image
                    else:
                        skip.append(name)
                images = new_images
                if skip and args.verbosity >= 2:
                    print('Skipping images: %s' % ', '.join(skip))

            state.Prepare(images, dtb)

            # Prepare the device tree by making sure that any missing
            # properties are added (e.g. 'pos' and 'size'). The values of
            # these may not be correct yet, but we add placeholders so that
            # the size of the device tree is correct. Later, in
            # SetCalculatedProperties() we will insert the correct values
            # without changing the device-tree size, thus ensuring that our
            # entry offsets remain the same.
            for image in images.values():
                image.ExpandEntries()
                if args.update_fdt:
                    image.AddMissingProperties()
                image.ProcessFdt(dtb)

            for dtb_item in state.GetFdts():
                dtb_item.Sync(auto_resize=True)
                dtb_item.Pack()
                dtb_item.Flush()

            for image in images.values():
                # Perform all steps for this image, including checking and
                # writing it. This means that errors found with a later
                # image will be reported after earlier images are already
                # completed and written, but that does not seem important.
                image.GetEntryContents()
                image.GetEntryOffsets()

                # We need to pack the entries to figure out where everything
                # should be placed. This sets the offset/size of each entry.
                # However, after packing we call ProcessEntryContents() which
                # may result in an entry changing size. In that case we need
                # to do another pass. Since the device tree often contains
                # the final offset/size information we try to make space for
                # this in AddMissingProperties() above. However, if the
                # device is compressed we cannot know this compressed size in
                # advance, since changing an offset from 0x100 to 0x104 (for
                # example) can alter the compressed size of the device tree.
                # So we need a third pass for this.
                passes = 3
                for pack_pass in range(passes):
                    try:
                        image.PackEntries()
                        image.CheckSize()
                        image.CheckEntries()
                    except Exception as e:
                        # On a packing error, still write the map (if asked)
                        # so the user can see what went where
                        if args.map:
                            fname = image.WriteMap()
                            print("Wrote map file '%s' to show errors"
                                  % fname)
                        raise
                    image.SetImagePos()
                    if args.update_fdt:
                        image.SetCalculatedProperties()
                        for dtb_item in state.GetFdts():
                            dtb_item.Sync()
                    sizes_ok = image.ProcessEntryContents()
                    if sizes_ok:
                        break
                    # Something grew: undo the packing and try again
                    image.ResetForPack()
                if not sizes_ok:
                    image.Raise(
                        'Entries expanded after packing (tried %s passes)' %
                        passes)

                image.WriteSymbols()
                image.BuildImage()
                if args.map:
                    image.WriteMap()

            # Write the updated FDTs to our output files
            for dtb_item in state.GetFdts():
                tools.WriteFile(dtb_item._fname, dtb_item.GetContents())
        finally:
            tools.FinaliseOutputDir()
    finally:
        tout.Uninit()

    return 0
def Run(name, *args):
    """Run a tool with the given arguments in the output directory.

    Args:
        name: Command name to run
        args: Arguments to the tool

    Returns:
        Result of command.Run() so callers can inspect the tool's output,
        matching the other Run() wrappers in this codebase
    """
    return command.Run(name, *args, cwd=outdir)