Example #1
def collect_patches(series, series_id, url, rest_api=call_rest_api):
    """Collect patch information about a series from patchwork

    Uses the Patchwork REST API to collect information provided by patchwork
    about the status of each patch.

    Args:
        series (Series): Series object corresponding to the local branch
            containing the series
        series_id (str): Patch series ID number
        url (str): URL of patchwork server, e.g. 'https://patchwork.ozlabs.org'
        rest_api (function): API function to call to access Patchwork, for
            testing

    Returns:
        list: List of patches sorted by sequence number, each a Patch object

    Raises:
        ValueError: if the URL could not be read or the web page does not follow
            the expected structure
    """
    data = rest_api(url, 'series/%s/' % series_id)

    # Get all the rows, which are patches
    patch_dict = data['patches']
    count = len(patch_dict)
    num_commits = len(series.commits)
    if count != num_commits:
        tout.warning('Warning: Patchwork reports %d patches, series has %d' %
                     (count, num_commits))

    patches = []

    # Work through each row (patch) one at a time, collecting the information
    for pw_patch in patch_dict:
        patch = Patch(pw_patch['id'])
        patch.parse_subject(pw_patch['name'])
        patches.append(patch)

    # Sort patches by patch number
    patches = sorted(patches, key=lambda x: x.seq)
    return patches
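
Below is a hedged usage sketch (not part of the original module): it drives collect_patches() with a stubbed rest_api and a minimal stand-in for the Series object. The only assumptions are what the listing shows: the REST payload has a 'patches' list of dicts with 'id' and 'name', and Patch objects expose .seq and .subject after parse_subject().

def fake_rest_api(url, request):
    # Shape mirrors what collect_patches() reads above; the values are made up
    return {'patches': [
        {'id': 1001, 'name': '[PATCH 1/2] arm: add foo driver'},
        {'id': 1002, 'name': '[PATCH 2/2] arm: enable foo on bar boards'},
    ]}

class FakeSeries:
    """Hypothetical stand-in; collect_patches() only reads .commits here"""
    commits = [None, None]

patches = collect_patches(FakeSeries(), '12345',
                          'https://patchwork.ozlabs.org',
                          rest_api=fake_rest_api)
for patch in patches:
    print(patch.seq, patch.subject)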
Example #2
def Binman(args):
    """The main control code for binman

    This assumes that help and test options have already been dealt with. It
    deals with the core task of building images.

    Args:
        args: Command line arguments Namespace object
    """
    global Image
    global state

    if args.full_help:
        tools.print_full_help(
            os.path.join(os.path.dirname(os.path.realpath(sys.argv[0])),
                         'README.rst'))
        return 0

    # Put these here so that we can import this module without libfdt
    from binman.image import Image
    from binman import state

    if args.cmd in ['ls', 'extract', 'replace', 'tool']:
        try:
            tout.init(args.verbosity)
            tools.prepare_output_dir(None)
            if args.cmd == 'ls':
                ListEntries(args.image, args.paths)

            if args.cmd == 'extract':
                ExtractEntries(args.image, args.filename, args.outdir,
                               args.paths, not args.uncompressed, args.format)

            if args.cmd == 'replace':
                ReplaceEntries(args.image,
                               args.filename,
                               args.indir,
                               args.paths,
                               do_compress=not args.compressed,
                               allow_resize=not args.fix_size,
                               write_map=args.map)

            if args.cmd == 'tool':
                tools.set_tool_paths(args.toolpath)
                if args.list:
                    bintool.Bintool.list_all()
                elif args.fetch:
                    if not args.bintools:
                        raise ValueError(
                            "Please specify bintools to fetch or 'all' or 'missing'"
                        )
                    bintool.Bintool.fetch_tools(bintool.FETCH_ANY,
                                                args.bintools)
                else:
                    raise ValueError("Invalid arguments to 'tool' subcommand")
        finally:
            tools.finalise_output_dir()
        return 0

    elf_params = None
    if args.update_fdt_in_elf:
        elf_params = args.update_fdt_in_elf.split(',')
        if len(elf_params) != 4:
            raise ValueError(
                'Invalid args %s to --update-fdt-in-elf: expected infile,outfile,begin_sym,end_sym'
                % elf_params)

    # Try to figure out which device tree contains our image description
    if args.dt:
        dtb_fname = args.dt
    else:
        board = args.board
        if not board:
            raise ValueError(
                'Must provide a board to process (use -b <board>)')
        board_pathname = os.path.join(args.build_dir, board)
        dtb_fname = os.path.join(board_pathname, 'u-boot.dtb')
        if not args.indir:
            args.indir = ['.']
        args.indir.append(board_pathname)

    try:
        tout.init(args.verbosity)
        elf.debug = args.debug
        cbfs_util.VERBOSE = args.verbosity > 2
        state.use_fake_dtb = args.fake_dtb

        # Normally we replace the 'u-boot' etype with 'u-boot-expanded', etc.
        # When running tests this can be disabled using this flag. When not
        # updating the FDT in image, it is not needed by binman, but we use it
        # for consistency, so that the images look the same to U-Boot at
        # runtime.
        use_expanded = not args.no_expanded
        try:
            tools.set_input_dirs(args.indir)
            tools.prepare_output_dir(args.outdir, args.preserve)
            tools.set_tool_paths(args.toolpath)
            state.SetEntryArgs(args.entry_arg)
            state.SetThreads(args.threads)

            images = PrepareImagesAndDtbs(dtb_fname, args.image,
                                          args.update_fdt, use_expanded)

            if args.test_section_timeout:
                # Set the first image to timeout, used in testThreadTimeout()
                images[list(images.keys())[0]].test_section_timeout = True
            invalid = False
            bintool.Bintool.set_missing_list(
                args.force_missing_bintools.split(',')
                if args.force_missing_bintools else None)
            for image in images.values():
                invalid |= ProcessImage(image,
                                        args.update_fdt,
                                        args.map,
                                        allow_missing=args.allow_missing,
                                        allow_fake_blobs=args.fake_ext_blobs)

            # Write the updated FDTs to our output files
            for dtb_item in state.GetAllFdts():
                tools.write_file(dtb_item._fname, dtb_item.GetContents())

            if elf_params:
                data = state.GetFdtForEtype('u-boot-dtb').GetContents()
                elf.UpdateFile(*elf_params, data)

            if invalid:
                tout.warning("\nSome images are invalid")

            # Use this to debug the time taken to pack the image
            #state.TimingShow()
        finally:
            tools.finalise_output_dir()
    finally:
        tout.uninit()

    return 0
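
A minimal sketch of how Binman() might be driven for the 'ls' sub-command (this is not binman's real command-line parser); the attribute names come from the code above and the image filename is hypothetical.

import argparse

args = argparse.Namespace(
    full_help=False,     # skip the README.rst help path
    cmd='ls',            # list entries in an existing image
    verbosity=1,         # passed to tout.init()
    image='image.bin',   # hypothetical image file to inspect
    paths=[],            # no filter: list all entries
)
Binman(args)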
Example #3
def ProcessImage(image,
                 update_fdt,
                 write_map,
                 get_contents=True,
                 allow_resize=True,
                 allow_missing=False,
                 allow_fake_blobs=False):
    """Perform all steps for this image, including checking and # writing it.

    This means that errors found with a later image will be reported after
    earlier images are already completed and written, but that does not seem
    important.

    Args:
        image: Image to process
        update_fdt: True to update the FDT with entry offsets, etc.
        write_map: True to write a map file
        get_contents: True to get the image contents from files, etc., False if
            the contents is already present
        allow_resize: True to allow entries to change size (this does a re-pack
            of the entries), False to raise an exception
        allow_missing: Allow blob_ext objects to be missing
        allow_fake_blobs: Allow blob_ext objects to be faked with dummy files

    Returns:
        True if one or more external blobs are missing or faked,
        False if all are present
    """
    if get_contents:
        image.SetAllowMissing(allow_missing)
        image.SetAllowFakeBlob(allow_fake_blobs)
        image.GetEntryContents()
    image.GetEntryOffsets()

    # We need to pack the entries to figure out where everything
    # should be placed. This sets the offset/size of each entry.
    # However, after packing we call ProcessEntryContents() which
    # may result in an entry changing size. In that case we need to
    # do another pass. Since the device tree often contains the
    # final offset/size information we try to make space for this in
    # AddMissingProperties() above. However, if the device tree is
    # compressed we cannot know its compressed size in advance,
    # since changing an offset from 0x100 to 0x104 (for example) can
    # alter the compressed size of the device tree. So we need
    # further passes for this, up to the limit set below.
    passes = 5
    for pack_pass in range(passes):
        try:
            image.PackEntries()
        except Exception:
            if write_map:
                fname = image.WriteMap()
                print("Wrote map file '%s' to show errors" % fname)
            raise
        image.SetImagePos()
        if update_fdt:
            image.SetCalculatedProperties()
            for dtb_item in state.GetAllFdts():
                dtb_item.Sync()
                dtb_item.Flush()
        image.WriteSymbols()
        sizes_ok = image.ProcessEntryContents()
        if sizes_ok:
            break
        image.ResetForPack()
    tout.info('Pack completed after %d pass(es)' % (pack_pass + 1))
    if not sizes_ok:
        image.Raise('Entries changed size after packing (tried %s passes)' %
                    passes)

    image.BuildImage()
    if write_map:
        image.WriteMap()
    missing_list = []
    image.CheckMissing(missing_list)
    if missing_list:
        tout.warning(
            "Image '%s' is missing external blobs and is non-functional: %s" %
            (image.name, ' '.join([e.name for e in missing_list])))
        _ShowHelpForMissingBlobs(missing_list)
    faked_list = []
    image.CheckFakedBlobs(faked_list)
    if faked_list:
        tout.warning(
            "Image '%s' has faked external blobs and is non-functional: %s" %
            (image.name, ' '.join(
                [os.path.basename(e.GetDefaultFilename())
                 for e in faked_list])))
    missing_bintool_list = []
    image.check_missing_bintools(missing_bintool_list)
    if missing_bintool_list:
        tout.warning(
            "Image '%s' has missing bintools and is non-functional: %s" %
            (image.name, ' '.join([
                os.path.basename(bintool.name)
                for bintool in missing_bintool_list
            ])))
    return any([missing_list, faked_list, missing_bintool_list])
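
As a usage note, the boolean result is meant to be OR-ed across images, as the Binman() listing above does. A condensed sketch, assuming 'images' is the dict returned by PrepareImagesAndDtbs():

invalid = False
for image in images.values():
    # True means the image is missing or faking blobs/bintools
    invalid |= ProcessImage(image, update_fdt=True, write_map=False,
                            allow_missing=True)
if invalid:
    tout.warning('\nSome images are invalid')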
Example #4
def ReplaceEntries(image_fname,
                   input_fname,
                   indir,
                   entry_paths,
                   do_compress=True,
                   allow_resize=True,
                   write_map=False):
    """Replace the data from one or more entries from input files

    Args:
        image_fname: Image filename to process
        input_fname: Single input filename to use if replacing one file, None
            otherwise
        indir: Input directory to use (for any number of files), else None
        entry_paths: List of entry paths to replace
        do_compress: True if the input data is uncompressed and may need to be
            compressed if the entry requires it, False if the data is already
            compressed
        allow_resize: True to allow entries to change size (this does a re-pack
            of the entries), False to raise an exception
        write_map: True to write a map file

    Returns:
        Image object that was processed, or None if a single entry was
            replaced from input_fname
    """
    image_fname = os.path.abspath(image_fname)
    image = Image.FromFile(image_fname)

    # Replace an entry from a single file, as a special case
    if input_fname:
        if not entry_paths:
            raise ValueError('Must specify an entry path to read with -f')
        if len(entry_paths) != 1:
            raise ValueError(
                'Must specify exactly one entry path to write with -f')
        entry = image.FindEntryPath(entry_paths[0])
        data = tools.read_file(input_fname)
        tout.notice("Read %#x bytes from file '%s'" % (len(data), input_fname))
        WriteEntryToImage(image,
                          entry,
                          data,
                          do_compress=do_compress,
                          allow_resize=allow_resize,
                          write_map=write_map)
        return

    # Otherwise read each entry's input data from a file whose path matches
    # the entry path. This means that files must appear in subdirectories if
    # they are part of a sub-section.
    einfos = image.GetListEntries(entry_paths)[0]
    tout.notice("Replacing %d matching entries in image '%s'" %
                (len(einfos), image_fname))

    BeforeReplace(image, allow_resize)

    for einfo in einfos:
        entry = einfo.entry
        if entry.GetEntries():
            tout.info("Skipping section entry '%s'" % entry.GetPath())
            continue

        path = entry.GetPath()[1:]
        fname = os.path.join(indir, path)

        if os.path.exists(fname):
            tout.notice("Write entry '%s' from file '%s'" %
                        (entry.GetPath(), fname))
            data = tools.read_file(fname)
            ReplaceOneEntry(image, entry, data, do_compress, allow_resize)
        else:
            tout.warning("Skipping entry '%s' from missing file '%s'" %
                         (entry.GetPath(), fname))

    AfterReplace(image, allow_resize=allow_resize, write_map=write_map)
    return image
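
A hedged sketch of the directory-based mode: with input_fname=None, each entry's data is read from indir at a path matching its entry path (GetPath() with the leading '/' stripped), so entries inside a sub-section live in a matching subdirectory. All file and entry names below are hypothetical.

# Expected layout under 'indir':
#   indir/u-boot       -> entry with path '/u-boot'
#   indir/fit/kernel   -> entry with path '/fit/kernel' (inside a 'fit' section)
ReplaceEntries('image.bin', None, 'indir', ['u-boot', 'fit/kernel'],
               do_compress=True, allow_resize=True, write_map=False)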
Example #5
def _ShowBlobHelp(path, text):
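    """Print the help text for one missing blob, prefixed by its path

    Args:
        path: Path identifying the blob (printed as a heading)
        text: Help text to print, possibly spanning multiple lines
    """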
    tout.warning('\n%s:' % path)
    for line in text.splitlines():
        tout.warning('   %s' % line)
Example #6
def check_patchwork_status(series,
                           series_id,
                           branch,
                           dest_branch,
                           force,
                           show_comments,
                           url,
                           rest_api=call_rest_api,
                           test_repo=None):
    """Check the status of a series on Patchwork

    This finds review tags and comments for a series in Patchwork, displaying
    them to show what is new compared to the local series.

    Args:
        series (Series): Series object for the existing branch
        series_id (str): Patch series ID number
        branch (str): Existing branch to update, or None
        dest_branch (str): Name of new branch to create, or None
        force (bool): True to force overwriting dest_branch if it exists
        show_comments (bool): True to show the comments on each patch
        url (str): URL of patchwork server, e.g. 'https://patchwork.ozlabs.org'
        rest_api (function): API function to call to access Patchwork, for
            testing
        test_repo (pygit2.Repository): Repo to use (use None unless testing)
    """
    patches = collect_patches(series, series_id, url, rest_api)
    col = terminal.Color()
    count = len(series.commits)
    new_rtag_list = [None] * count
    review_list = [None] * count

    patch_for_commit, _, warnings = compare_with_series(series, patches)
    for warn in warnings:
        tout.warning(warn)

    patch_list = [patch_for_commit.get(c) for c in range(len(series.commits))]

    with concurrent.futures.ThreadPoolExecutor(max_workers=16) as executor:
        futures = executor.map(find_new_responses, repeat(new_rtag_list),
                               repeat(review_list), range(count),
                               series.commits, patch_list, repeat(url),
                               repeat(rest_api))
    for fresponse in futures:
        if fresponse:
            raise fresponse.exception()

    num_to_add = 0
    for seq, cmt in enumerate(series.commits):
        patch = patch_for_commit.get(seq)
        if not patch:
            continue
        terminal.tprint('%3d %s' % (patch.seq, patch.subject[:50]),
                        colour=col.BLUE)
        cmt = series.commits[seq]
        base_rtags = cmt.rtags
        new_rtags = new_rtag_list[seq]

        indent = ' ' * 2
        show_responses(base_rtags, indent, False)
        num_to_add += show_responses(new_rtags, indent, True)
        if show_comments:
            for review in review_list[seq]:
                terminal.tprint('Review: %s' % review.meta, colour=col.RED)
                for snippet in review.snippets:
                    for line in snippet:
                        quoted = line.startswith('>')
                        terminal.tprint('    %s' % line,
                                        colour=col.MAGENTA if quoted else None)
                    terminal.tprint()

    terminal.tprint(
        "%d new response%s available in patchwork%s" %
        (num_to_add, 's' if num_to_add != 1 else '',
         '' if dest_branch else ' (use -d to write them to a new branch)'))

    if dest_branch:
        num_added = create_branch(series, new_rtag_list, branch, dest_branch,
                                  force, test_repo)
        terminal.tprint(
            "%d response%s added from patchwork into new branch '%s'" %
            (num_added, 's' if num_added != 1 else '', dest_branch))