Example #1
def apply_patches(args, desc, source_dir, patch_src, target_dir, logwrite=lambda x:None):
    """
    Given a path to a directory containing patches and SmPL patches, apply
    them to the target directory. If requested, refresh the patches or
    test a specific SmPL patch.
    """
    logwrite('Applying patches from %s to %s ...' % (patch_src, target_dir))
    test_cocci = args.test_cocci or args.profile_cocci
    test_cocci_found = False
    patches = []
    sempatches = []
    for root, dirs, files in os.walk(os.path.join(source_dir, patch_src)):
        for f in files:
            if not test_cocci and f.endswith('.patch'):
                patches.append(os.path.join(root, f))
            if f.endswith('.cocci'):
                if test_cocci:
                    if f not in test_cocci:
                        continue
                    test_cocci_found = True
                    if args.test_cocci:
                        logwrite("Testing Coccinelle SmPL patch: %s" % test_cocci)
                    elif args.profile_cocci:
                        logwrite("Profiling Coccinelle SmPL patch: %s" % test_cocci)
                sempatches.append(os.path.join(root, f))
    patches.sort()
    prefix_len = len(os.path.join(source_dir, patch_src)) + 1
    for pfile in patches:
        print_name = pfile[prefix_len:]
        # read the patch file
        p = patch.fromfile(pfile)
        # complain if it's not a patch
        if not p:
            raise Exception('No patch content found in %s' % print_name)
        # leading / seems to be stripped?
        if 'dev/null' in p.items[0].source:
            raise Exception('Patches creating files are not supported (in %s)' % print_name)
        # check if the first file the patch touches exists, if so
        # assume the patch needs to be applied -- otherwise continue
        patched_file = '/'.join(p.items[0].source.split('/')[1:])
        fullfn = os.path.join(target_dir, patched_file)
        if not os.path.exists(fullfn):
            if args.verbose:
                logwrite("Not applying %s, not needed" % print_name)
            continue
        if args.verbose:
            logwrite("Applying patch %s" % print_name)

        if args.refresh:
            # but for refresh, of course look at all files the patch touches
            for patchitem in p.items:
                patched_file = '/'.join(patchitem.source.split('/')[1:])
                fullfn = os.path.join(target_dir, patched_file)
                shutil.copyfile(fullfn, fullfn + '.orig_file')

        process = subprocess.Popen(['patch', '-p1'], stdout=subprocess.PIPE,
                                   stderr=subprocess.STDOUT, stdin=subprocess.PIPE,
                                   close_fds=True, universal_newlines=True,
                                   cwd=target_dir)
        output = process.communicate(input=open(pfile, 'r').read())[0]
        output = output.split('\n')
        if output[-1] == '':
            output = output[:-1]
        if args.verbose:
            for line in output:
                logwrite('> %s' % line)
        if process.returncode != 0:
            if not args.verbose:
                logwrite("Failed to apply changes from %s" % print_name)
                for line in output:
                    logwrite('> %s' % line)
            return 2

        if args.refresh:
            pfilef = open(pfile + '.tmp', 'a')
            pfilef.write(p.top_header)
            pfilef.flush()
            for patchitem in p.items:
                patched_file = '/'.join(patchitem.source.split('/')[1:])
                fullfn = os.path.join(target_dir, patched_file)
                process = subprocess.Popen(['diff', '-p', '-u', patched_file + '.orig_file', patched_file,
                                            '--label', 'a/' + patched_file,
                                            '--label', 'b/' + patched_file],
                                           stdout=pfilef, close_fds=True,
                                           universal_newlines=True, cwd=target_dir)
                process.wait()
                os.unlink(fullfn + '.orig_file')
                if not process.returncode in (0, 1):
                    logwrite("Failed to diff to refresh %s" % print_name)
                    pfilef.close()
                    os.unlink(pfile + '.tmp')
                    return 2
            pfilef.close()
            os.rename(pfile + '.tmp', pfile)

        # remove orig/rej files that patch sometimes creates
        for root, dirs, files in os.walk(target_dir):
            for f in files:
                if f[-5:] == '.orig' or f[-4:] == '.rej':
                    os.unlink(os.path.join(root, f))
        git_debug_snapshot(args, "apply %s patch %s" % (desc, print_name))

    sempatches.sort()
    prefix_len = len(os.path.join(source_dir, patch_src)) + 1

    for cocci_file in sempatches:
        # Until Coccinelle picks this up
        pycocci = os.path.join(source_dir, 'devel/pycocci')
        cmd = [pycocci, cocci_file]
        extra_spatch_args = []
        if args.profile_cocci:
            cmd.append('--profile-cocci')
        cmd.append(os.path.abspath(target_dir))
        print_name = cocci_file[prefix_len:]
        if args.verbose:
            logwrite("Applying SmPL patch %s" % print_name)
        sprocess = subprocess.Popen(cmd,
                                    stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                                    close_fds=True, universal_newlines=True,
                                    cwd=target_dir)
        output = sprocess.communicate()[0]
        sprocess.wait()
        if sprocess.returncode != 0:
            logwrite("Failed to process SmPL patch %s with %i" % (print_name, sprocess.returncode))
            return 2
        output = output.split('\n')
        if output[-1] == '':
            output = output[:-1]
        if args.verbose:
            for line in output:
                logwrite('> %s' % line)

        # remove cocci_backup files
        for root, dirs, files in os.walk(target_dir):
            for f in files:
                if f.endswith('.cocci_backup'):
                    os.unlink(os.path.join(root, f))
        git_debug_snapshot(args, "apply %s SmPL patch %s" % (desc, print_name))

    if test_cocci and test_cocci_found:
        logwrite('Done!')
        sys.exit(0)
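
A minimal usage sketch for apply_patches() above; the namespace fields mirror the attributes the function reads, while the values and paths are hypothetical, and helpers such as patch, git_debug_snapshot and source_dir are assumed to come from the surrounding gentree.py module.

import types

# Hypothetical arguments; only the attributes apply_patches() actually reads
# (test_cocci, profile_cocci, verbose, refresh) plus whatever
# git_debug_snapshot() presumably needs (e.g. gitdebug) have to be present.
args = types.SimpleNamespace(test_cocci=None, profile_cocci=None,
                             verbose=True, refresh=False, gitdebug=False)

ret = apply_patches(args, 'backport', '/path/to/backports-source', 'patches',
                    '/path/to/output-tree', logwrite=print)
if ret:
    print('patching failed with code %d' % ret)
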
Example #2
def process(kerneldir, outdir, copy_list_file, git_revision=None,
            clean=False, refresh=False, base_name="Linux", gitdebug=False,
            verbose=False, extra_driver=[], kup=False,
            kup_test=False,
            logwrite=lambda x:None,
            git_tracked_version=False):
    class Args(object):
        def __init__(self, kerneldir, outdir, copy_list_file,
                     git_revision, clean, refresh, base_name,
                     gitdebug, verbose, extra_driver, kup,
                     kup_test):
            self.kerneldir = kerneldir
            self.outdir = outdir
            self.copy_list = copy_list_file
            self.git_revision = git_revision
            self.clean = clean
            self.refresh = refresh
            self.base_name = base_name
            self.gitdebug = gitdebug
            self.verbose = verbose
            self.extra_driver = extra_driver
            self.kup = kup
            self.kup_test = kup_test
    def git_paranoia(tree=None, logwrite=lambda x:None):
        data = git.paranoia(tree)
        if (data['r'] != 0):
            logwrite('Cannot use %s' % tree)
            logwrite('%s' % data['output'])
            sys.exit(data['r'])
        else:
            logwrite('Validated tree: %s' % tree)

    args = Args(kerneldir, outdir, copy_list_file,
                git_revision, clean, refresh, base_name,
                gitdebug, verbose, extra_driver, kup, kup_test)
    rel_prep = None

    # start processing ...
    if (args.kup or args.kup_test):
        git_paranoia(source_dir, logwrite)
        git_paranoia(kerneldir, logwrite)

        rel_describe = git.describe(rev=None, tree=source_dir, extra_args=['--dirty'])
        release = os.path.basename(args.outdir)
        version = release.replace("backports-", "")

        rel_prep = get_rel_prep(version)
        if (not rel_prep):
            logwrite('Invalid backports release name: %s' % release)
            logwrite('For rules on the release name see upload_release()')
            sys.exit(1)
        rel_type = "linux-stable"
        if (not rel_prep['stable']):
            rel_type = "linux-next"
        if (rel_prep['expected_tag'] != rel_describe):
            logwrite('Unexpected %s based backports release tag on' % rel_type)
            logwrite('the backports tree: %s\n' % rel_describe)
            logwrite('You asked to make a release with this ')
            logwrite('directory name: %s' % release)
            logwrite('The actual expected tag we should find on')
            logwrite('the backports tree then is: %s\n' % rel_prep['expected_tag'])
            logwrite('For rules on the release name see upload_release()')
            sys.exit(1)

    copy_list = read_copy_list(args.copy_list)
    deplist = read_dependencies(os.path.join(source_dir, 'dependencies'))

    # validate output directory
    check_output_dir(args.outdir, args.clean)

    # do the copy
    backport_files = [(x, x) for x in [
        'Kconfig', 'Makefile', 'Makefile.build', 'Makefile.kernel', '.gitignore',
        'Makefile.real', 'compat/', 'backport-include/', 'kconf/', 'defconfigs/',
        'scripts/', '.blacklist.map', 'udev/',
    ]]
    if not args.git_revision:
        logwrite('Copy original source files ...')
    else:
        logwrite('Get original source files from git ...')
    
    copy_files(os.path.join(source_dir, 'backport'), backport_files, args.outdir)

    git_debug_init(args)

    if not args.git_revision:
        copy_files(args.kerneldir, copy_list, args.outdir)
    else:
        copy_git_files(args.kerneldir, copy_list, args.git_revision, args.outdir)

    # FIXME: should we add a git version of this (e.g. --git-extra-driver)?
    for src, copy_list in args.extra_driver:
        if (args.kup or args.kup_test):
            git_paranoia(src)
        copy_files(src, read_copy_list(open(copy_list, 'r')), args.outdir)

    git_debug_snapshot(args, 'Add driver sources')

    disable_list = add_automatic_backports(args)
    if disable_list:
        bpcfg = kconfig.ConfigTree(os.path.join(args.outdir, 'compat', 'Kconfig'))
        bpcfg.disable_symbols(disable_list)
    git_debug_snapshot(args, 'Add automatic backports')

    logwrite('Apply patches ...')
    patches = []
    sempatches = []
    for root, dirs, files in os.walk(os.path.join(source_dir, 'patches')):
        for f in files:
            if f.endswith('.patch'):
                patches.append(os.path.join(root, f))
            if f.endswith('.cocci'):
                sempatches.append(os.path.join(root, f))
    patches.sort()
    prefix_len = len(os.path.join(source_dir, 'patches')) + 1
    for pfile in patches:
        print_name = pfile[prefix_len:]
        # read the patch file
        p = patch.fromfile(pfile)
        # complain if it's not a patch
        if not p:
            raise Exception('No patch content found in %s' % print_name)
        # leading / seems to be stripped?
        if 'dev/null' in p.items[0].source:
            raise Exception('Patches creating files are not supported (in %s)' % print_name)
        # check if the first file the patch touches exists, if so
        # assume the patch needs to be applied -- otherwise continue
        patched_file = '/'.join(p.items[0].source.split('/')[1:])
        fullfn = os.path.join(args.outdir, patched_file)
        if not os.path.exists(fullfn):
            if args.verbose:
                logwrite("Not applying %s, not needed" % print_name)
            continue
        if args.verbose:
            logwrite("Applying patch %s" % print_name)

        if args.refresh:
            # but for refresh, of course look at all files the patch touches
            for patchitem in p.items:
                patched_file = '/'.join(patchitem.source.split('/')[1:])
                fullfn = os.path.join(args.outdir, patched_file)
                shutil.copyfile(fullfn, fullfn + '.orig_file')

        process = subprocess.Popen(['patch', '-p1'], stdout=subprocess.PIPE,
                                   stderr=subprocess.STDOUT, stdin=subprocess.PIPE,
                                   close_fds=True, universal_newlines=True,
                                   cwd=args.outdir)
        output = process.communicate(input=open(pfile, 'r').read())[0]
        output = output.split('\n')
        if output[-1] == '':
            output = output[:-1]
        if args.verbose:
            for line in output:
                logwrite('> %s' % line)
        if process.returncode != 0:
            if not args.verbose:
                logwrite("Failed to apply changes from %s" % print_name)
                for line in output:
                    logwrite('> %s' % line)
#            return 2

        if args.refresh:
            pfilef = open(pfile + '.tmp', 'a')
            pfilef.write(p.top_header)
            pfilef.flush()
            for patchitem in p.items:
                patched_file = '/'.join(patchitem.source.split('/')[1:])
                fullfn = os.path.join(args.outdir, patched_file)
                process = subprocess.Popen(['diff', '-p', '-u', patched_file + '.orig_file', patched_file,
                                            '--label', 'a/' + patched_file,
                                            '--label', 'b/' + patched_file],
                                           stdout=pfilef, close_fds=True,
                                           universal_newlines=True, cwd=args.outdir)
                process.wait()
                os.unlink(fullfn + '.orig_file')
                if not process.returncode in (0, 1):
                    logwrite("Failed to diff to refresh %s" % print_name)
                    pfilef.close()
                    os.unlink(pfile + '.tmp')
                    return 3
            pfilef.close()
            os.rename(pfile + '.tmp', pfile)

        # remove orig/rej files that patch sometimes creates
        for root, dirs, files in os.walk(args.outdir):
            for f in files:
                if f[-5:] == '.orig' or f[-4:] == '.rej':
                    os.unlink(os.path.join(root, f))
        git_debug_snapshot(args, "apply backport patch %s" % print_name)

    sempatches.sort()
    with tempdir() as t:
        if not args.gitdebug and sempatches:
            # combine all spatches
            fn = os.path.join(t, 'combined.cocci')
            f = open(fn, 'w')
            for cocci_file in sempatches:
                for l in open(cocci_file, 'r'):
                    f.write(l)
                f.write('\n')
            f.close()
            sempatches = [fn]
            prefix_len = 0
        else:
            prefix_len = len(os.path.join(source_dir, 'patches')) + 1
        for cocci_file in sempatches:
            print_name = cocci_file[prefix_len:]
            if args.verbose:
                logwrite("Applying patch %s" % print_name)

            process = subprocess.Popen(['spatch', '--sp-file', cocci_file, '--in-place',
                                        '--backup-suffix', '.cocci_backup', '--dir', '.'],
                                       stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                                       close_fds=True, universal_newlines=True,
                                       cwd=args.outdir)
            output = process.communicate()[0]
            output = output.split('\n')
            if output[-1] == '':
                output = output[:-1]
            if args.verbose:
                for line in output:
                    logwrite('> %s' % line)
            if process.returncode != 0:
                if not args.verbose:
                    logwrite("Failed to apply changes from %s" % print_name)
                    for line in output:
                        logwrite('> %s' % line)
#                return 2

            # remove cocci_backup files
            for root, dirs, files in os.walk(args.outdir):
                for f in files:
                    if f.endswith('.cocci_backup'):
                        os.unlink(os.path.join(root, f))
            git_debug_snapshot(args, "apply backport patch %s" % print_name)

    # some post-processing is required
    configtree = kconfig.ConfigTree(os.path.join(args.outdir, 'Kconfig'))
    logwrite('Modify Kconfig tree ...')
    configtree.prune_sources(ignore=['Kconfig.kernel', 'Kconfig.versions'])
    git_debug_snapshot(args, "prune Kconfig tree")
    configtree.force_tristate_modular()
    git_debug_snapshot(args, "force tristate options modular")
    configtree.modify_selects()
    git_debug_snapshot(args, "convert select to depends on")

    # write the versioning file
    if git_tracked_version:
        backports_version = "(see git)"
        kernel_version = "(see git)"
    else:
        backports_version = git.describe(tree=source_dir, extra_args=['--long'])
        kernel_version = git.describe(rev=args.git_revision or 'HEAD',
                                      tree=args.kerneldir,
                                      extra_args=['--long'])
    f = open(os.path.join(args.outdir, 'versions'), 'w')
    f.write('BACKPORTS_VERSION="%s"\n' % backports_version)
    f.write('BACKPORTED_KERNEL_VERSION="%s"\n' % kernel_version)
    f.write('BACKPORTED_KERNEL_NAME="%s"\n' % args.base_name)
    if git_tracked_version:
        f.write('BACKPORTS_GIT_TRACKED="backport tracker ID: $(shell git rev-parse HEAD 2>/dev/null || echo \'not built in git tree\')"\n')
    f.close()

    symbols = configtree.symbols()

    # write local symbol list -- needed during build
    f = open(os.path.join(args.outdir, '.local-symbols'), 'w')
    for sym in symbols:
        f.write('%s=\n' % sym)
    f.close()

    git_debug_snapshot(args, "add versions/symbols files")

    logwrite('Rewrite Makefiles and Kconfig files ...')

    # rewrite Makefile and source symbols
    regexes = []
    for some_symbols in [symbols[i:i + 50] for i in range(0, len(symbols), 50)]:
        r = 'CONFIG_((' + '|'.join([s + '(_MODULE)?' for s in some_symbols]) + ')([^A-Za-z0-9_]|$))'
        regexes.append(re.compile(r, re.MULTILINE))
    for root, dirs, files in os.walk(args.outdir):
        # don't go into .git dir (possible debug thing)
        if '.git' in dirs:
            dirs.remove('.git')
        for f in files:
            data = open(os.path.join(root, f), 'r').read()
            for r in regexes:
                data = r.sub(r'CPTCFG_\1', data)
            data = re.sub(r'\$\(srctree\)', '$(backport_srctree)', data)
            data = re.sub(r'-Idrivers', '-I$(backport_srctree)/drivers', data)
            fo = open(os.path.join(root, f), 'w')
            fo.write(data)
            fo.close()

    git_debug_snapshot(args, "rename config symbol / srctree usage")

    # disable unbuildable Kconfig symbols and Makefile parts for files that don't exist
    maketree = make.MakeTree(os.path.join(args.outdir, 'Makefile.kernel'))
    disable_kconfig = []
    disable_makefile = []
    for sym in maketree.get_impossible_symbols():
        disable_kconfig.append(sym[7:])
        disable_makefile.append(sym[7:])

    configtree.disable_symbols(disable_kconfig)
    git_debug_snapshot(args, "disable impossible kconfig symbols")

    # add kernel version dependencies to Kconfig, from the dependency list
    # we read previously
    for sym in tuple(deplist.keys()):
        new = []
        for dep in deplist[sym]:
            if "kconfig:" in dep:
                kconfig_expr = dep.replace('kconfig: ', '')
                new.append(kconfig_expr)
            elif (dep == "DISABLE"):
                new.append('BACKPORT_DISABLED_KCONFIG_OPTION')
            else:
                new.append('!BACKPORT_KERNEL_%s' % dep.replace('.', '_'))
        deplist[sym] = new
    configtree.add_dependencies(deplist)
    git_debug_snapshot(args, "add kernel version dependencies")

    # disable things in makefiles that can't be selected and that the
    # build shouldn't recurse into because they don't exist -- if we
    # don't do that then a symbol from the kernel could cause the build
    # to attempt to recurse and fail
    #
    # Note that we split the regex after 50 symbols; this is because of a
    # limitation in the regex implementation (it only supports 100 nested
    # groups -- 50 seemed safer and is still fast)
    regexes = []
    for some_symbols in [disable_makefile[i:i + 50] for i in range(0, len(disable_makefile), 50)]:
        r = '^([^#].*((CPTCFG|CONFIG)_(' + '|'.join([s for s in some_symbols]) + ')))'
        regexes.append(re.compile(r, re.MULTILINE))
    for f in maketree.get_makefiles():
        data = open(f, 'r').read()
        for r in regexes:
            data = r.sub(r'#\1', data)
        fo = open(f, 'w')
        fo.write(data)
        fo.close()
    git_debug_snapshot(args, "disable unsatisfied Makefile parts")

    if (args.kup or args.kup_test):
        upload_release(args, rel_prep, logwrite=logwrite)

    logwrite('Done!')
    return 0
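
For clarity, a self-contained sketch of the CONFIG_ to CPTCFG_ renaming step performed above, run on a single string instead of the output tree; the symbol names are invented, and chunking into groups of 50 mirrors the regex-group limitation noted in the comments above.

import re

symbols = ['MAC80211', 'CFG80211']   # invented symbol list
regexes = []
for chunk in [symbols[i:i + 50] for i in range(0, len(symbols), 50)]:
    # (_MODULE)? presumably also catches the CONFIG_FOO_MODULE spelling seen in C sources
    r = 'CONFIG_((' + '|'.join(s + '(_MODULE)?' for s in chunk) + ')([^A-Za-z0-9_]|$))'
    regexes.append(re.compile(r, re.MULTILINE))

line = 'obj-$(CONFIG_MAC80211) += mac80211/'
for r in regexes:
    line = r.sub(r'CPTCFG_\1', line)
print(line)   # obj-$(CPTCFG_MAC80211) += mac80211/
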
Example #3
def process(kerneldir,
            outdir,
            copy_list_file,
            git_revision=None,
            clean=False,
            refresh=False,
            base_name="Linux",
            gitdebug=False,
            verbose=False,
            extra_driver=[],
            kup=False,
            kup_test=False,
            logwrite=lambda x: None,
            git_tracked_version=False):
    class Args(object):
        def __init__(self, kerneldir, outdir, copy_list_file, git_revision,
                     clean, refresh, base_name, gitdebug, verbose,
                     extra_driver, kup, kup_test):
            self.kerneldir = kerneldir
            self.outdir = outdir
            self.copy_list = copy_list_file
            self.git_revision = git_revision
            self.clean = clean
            self.refresh = refresh
            self.base_name = base_name
            self.gitdebug = gitdebug
            self.verbose = verbose
            self.extra_driver = extra_driver
            self.kup = kup
            self.kup_test = kup_test

    def git_paranoia(tree=None, logwrite=lambda x: None):
        data = git.paranoia(tree)
        if (data['r'] != 0):
            logwrite('Cannot use %s' % tree)
            logwrite('%s' % data['output'])
            sys.exit(data['r'])
        else:
            logwrite('Validated tree: %s' % tree)

    args = Args(kerneldir, outdir, copy_list_file, git_revision, clean,
                refresh, base_name, gitdebug, verbose, extra_driver, kup,
                kup_test)
    rel_prep = None

    # start processing ...
    if (args.kup or args.kup_test):
        git_paranoia(source_dir, logwrite)
        git_paranoia(kerneldir, logwrite)

        rel_describe = git.describe(rev=None,
                                    tree=source_dir,
                                    extra_args=['--dirty'])
        release = os.path.basename(args.outdir)
        version = release.replace("backports-", "")

        rel_prep = get_rel_prep(version)
        if (not rel_prep):
            logwrite('Invalid backports release name: %s' % release)
            logwrite('For rules on the release name see upload_release()')
            sys.exit(1)
        rel_type = "linux-stable"
        if (not rel_prep['stable']):
            rel_type = "linux-next"
        if (rel_prep['expected_tag'] != rel_describe):
            logwrite('Unexpected %s based backports release tag on' % rel_type)
            logwrite('the backports tree: %s\n' % rel_describe)
            logwrite('You asked to make a release with this ')
            logwrite('directory name: %s' % release)
            logwrite('The actual expected tag we should find on')
            logwrite('the backports tree then is: %s\n' %
                     rel_prep['expected_tag'])
            logwrite('For rules on the release name see upload_release()')
            sys.exit(1)

    copy_list = read_copy_list(args.copy_list)
    deplist = read_dependencies(os.path.join(source_dir, 'dependencies'))

    # validate output directory
    check_output_dir(args.outdir, args.clean)

    # do the copy
    backport_files = [(x, x) for x in [
        'Kconfig',
        'Makefile',
        'Makefile.build',
        'Makefile.kernel',
        '.gitignore',
        'Makefile.real',
        'compat/',
        'backport-include/',
        'kconf/',
        'defconfigs/',
        'scripts/',
        '.blacklist.map',
        'udev/',
    ]]
    if not args.git_revision:
        logwrite('Copy original source files ...')
    else:
        logwrite('Get original source files from git ...')

    copy_files(os.path.join(source_dir, 'backport'), backport_files,
               args.outdir)

    git_debug_init(args)

    if not args.git_revision:
        copy_files(args.kerneldir, copy_list, args.outdir)
    else:
        copy_git_files(args.kerneldir, copy_list, args.git_revision,
                       args.outdir)

    # FIXME: should we add a git version of this (e.g. --git-extra-driver)?
    for src, copy_list in args.extra_driver:
        if (args.kup or args.kup_test):
            git_paranoia(src)
        copy_files(src, read_copy_list(open(copy_list, 'r')), args.outdir)

    git_debug_snapshot(args, 'Add driver sources')

    disable_list = add_automatic_backports(args)
    if disable_list:
        bpcfg = kconfig.ConfigTree(
            os.path.join(args.outdir, 'compat', 'Kconfig'))
        bpcfg.disable_symbols(disable_list)
    git_debug_snapshot(args, 'Add automatic backports')

    logwrite('Apply patches ...')
    patches = []
    sempatches = []
    for root, dirs, files in os.walk(os.path.join(source_dir, 'patches')):
        for f in files:
            if f.endswith('.patch'):
                patches.append(os.path.join(root, f))
            if f.endswith('.cocci'):
                sempatches.append(os.path.join(root, f))
    patches.sort()
    prefix_len = len(os.path.join(source_dir, 'patches')) + 1
    for pfile in patches:
        print_name = pfile[prefix_len:]
        # read the patch file
        p = patch.fromfile(pfile)
        # complain if it's not a patch
        if not p:
            raise Exception('No patch content found in %s' % print_name)
        # leading / seems to be stripped?
        if 'dev/null' in p.items[0].source:
            raise Exception(
                'Patches creating files are not supported (in %s)' %
                print_name)
        # check if the first file the patch touches exists, if so
        # assume the patch needs to be applied -- otherwise continue
        patched_file = '/'.join(p.items[0].source.split('/')[1:])
        fullfn = os.path.join(args.outdir, patched_file)
        if not os.path.exists(fullfn):
            if args.verbose:
                logwrite("Not applying %s, not needed" % print_name)
            continue
        if args.verbose:
            logwrite("Applying patch %s" % print_name)

        if args.refresh:
            # but for refresh, of course look at all files the patch touches
            for patchitem in p.items:
                patched_file = '/'.join(patchitem.source.split('/')[1:])
                fullfn = os.path.join(args.outdir, patched_file)
                shutil.copyfile(fullfn, fullfn + '.orig_file')

        process = subprocess.Popen(['patch', '-p1'],
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.STDOUT,
                                   stdin=subprocess.PIPE,
                                   close_fds=True,
                                   universal_newlines=True,
                                   cwd=args.outdir)
        output = process.communicate(input=open(pfile, 'r').read())[0]
        output = output.split('\n')
        if output[-1] == '':
            output = output[:-1]
        if args.verbose:
            for line in output:
                logwrite('> %s' % line)
        if process.returncode != 0:
            if not args.verbose:
                logwrite("Failed to apply changes from %s" % print_name)
                for line in output:
                    logwrite('> %s' % line)
            return 2

        if args.refresh:
            pfilef = open(pfile + '.tmp', 'a')
            pfilef.write(p.top_header)
            pfilef.flush()
            for patchitem in p.items:
                patched_file = '/'.join(patchitem.source.split('/')[1:])
                fullfn = os.path.join(args.outdir, patched_file)
                process = subprocess.Popen([
                    'diff', '-p', '-u', patched_file + '.orig_file',
                    patched_file, '--label', 'a/' + patched_file, '--label',
                    'b/' + patched_file
                ],
                                           stdout=pfilef,
                                           close_fds=True,
                                           universal_newlines=True,
                                           cwd=args.outdir)
                process.wait()
                os.unlink(fullfn + '.orig_file')
                if not process.returncode in (0, 1):
                    logwrite("Failed to diff to refresh %s" % print_name)
                    pfilef.close()
                    os.unlink(pfile + '.tmp')
                    return 3
            pfilef.close()
            os.rename(pfile + '.tmp', pfile)

        # remove orig/rej files that patch sometimes creates
        for root, dirs, files in os.walk(args.outdir):
            for f in files:
                if f[-5:] == '.orig' or f[-4:] == '.rej':
                    os.unlink(os.path.join(root, f))
        git_debug_snapshot(args, "apply backport patch %s" % print_name)

    sempatches.sort()
    with tempdir() as t:
        if not args.gitdebug:
            # combine all spatches
            fn = os.path.join(t, 'combined.cocci')
            f = open(fn, 'w')
            for cocci_file in sempatches:
                for l in open(cocci_file, 'r'):
                    f.write(l)
                f.write('\n')
            f.close()
            sempatches = [fn]
            prefix_len = 0
        else:
            prefix_len = len(os.path.join(source_dir, 'patches')) + 1
        for cocci_file in sempatches:
            print_name = cocci_file[prefix_len:]
            if args.verbose:
                logwrite("Applying patch %s" % print_name)

            process = subprocess.Popen([
                'spatch', '--sp-file', cocci_file, '--in-place',
                '--backup-suffix', '.cocci_backup', '--dir', '.'
            ],
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.STDOUT,
                                       close_fds=True,
                                       universal_newlines=True,
                                       cwd=args.outdir)
            output = process.communicate()[0]
            output = output.split('\n')
            if output[-1] == '':
                output = output[:-1]
            if args.verbose:
                for line in output:
                    logwrite('> %s' % line)
            if process.returncode != 0:
                if not args.verbose:
                    logwrite("Failed to apply changes from %s" % print_name)
                    for line in output:
                        logwrite('> %s' % line)
                return 2

            # remove cocci_backup files
            for root, dirs, files in os.walk(args.outdir):
                for f in files:
                    if f.endswith('.cocci_backup'):
                        os.unlink(os.path.join(root, f))
            git_debug_snapshot(args, "apply backport patch %s" % print_name)

    # some post-processing is required
    configtree = kconfig.ConfigTree(os.path.join(args.outdir, 'Kconfig'))
    logwrite('Modify Kconfig tree ...')
    configtree.prune_sources(ignore=['Kconfig.kernel', 'Kconfig.versions'])
    git_debug_snapshot(args, "prune Kconfig tree")
    configtree.force_tristate_modular()
    git_debug_snapshot(args, "force tristate options modular")
    configtree.modify_selects()
    git_debug_snapshot(args, "convert select to depends on")

    # write the versioning file
    if git_tracked_version:
        backports_version = "(see git)"
        kernel_version = "(see git)"
    else:
        backports_version = git.describe(tree=source_dir,
                                         extra_args=['--long'])
        kernel_version = git.describe(rev=args.git_revision or 'HEAD',
                                      tree=args.kerneldir,
                                      extra_args=['--long'])
    f = open(os.path.join(args.outdir, 'versions'), 'w')
    f.write('BACKPORTS_VERSION="%s"\n' % backports_version)
    f.write('BACKPORTED_KERNEL_VERSION="%s"\n' % kernel_version)
    f.write('BACKPORTED_KERNEL_NAME="%s"\n' % args.base_name)
    if git_tracked_version:
        f.write(
            'BACKPORTS_GIT_TRACKED="backport tracker ID: $(shell git rev-parse HEAD 2>/dev/null || echo \'not built in git tree\')"\n'
        )
    f.close()

    symbols = configtree.symbols()

    # write local symbol list -- needed during build
    f = open(os.path.join(args.outdir, '.local-symbols'), 'w')
    for sym in symbols:
        f.write('%s=\n' % sym)
    f.close()

    git_debug_snapshot(args, "add versions/symbols files")

    logwrite('Rewrite Makefiles and Kconfig files ...')

    # rewrite Makefile and source symbols
    regexes = []
    for some_symbols in [
            symbols[i:i + 50] for i in range(0, len(symbols), 50)
    ]:
        r = 'CONFIG_((' + '|'.join([s + '(_MODULE)?' for s in some_symbols
                                    ]) + ')([^A-Za-z0-9_]|$))'
        regexes.append(re.compile(r, re.MULTILINE))
    for root, dirs, files in os.walk(args.outdir):
        # don't go into .git dir (possible debug thing)
        if '.git' in dirs:
            dirs.remove('.git')
        for f in files:
            data = open(os.path.join(root, f), 'r').read()
            for r in regexes:
                data = r.sub(r'CPTCFG_\1', data)
            data = re.sub(r'\$\(srctree\)', '$(backport_srctree)', data)
            data = re.sub(r'-Idrivers', '-I$(backport_srctree)/drivers', data)
            fo = open(os.path.join(root, f), 'w')
            fo.write(data)
            fo.close()

    git_debug_snapshot(args, "rename config symbol / srctree usage")

    # disable unbuildable Kconfig symbols and Makefile parts for files that don't exist
    maketree = make.MakeTree(os.path.join(args.outdir, 'Makefile.kernel'))
    disable_kconfig = []
    disable_makefile = []
    for sym in maketree.get_impossible_symbols():
        disable_kconfig.append(sym[7:])
        disable_makefile.append(sym[7:])

    configtree.disable_symbols(disable_kconfig)
    git_debug_snapshot(args, "disable impossible kconfig symbols")

    # add kernel version dependencies to Kconfig, from the dependency list
    # we read previously
    for sym in tuple(deplist.keys()):
        new = []
        for dep in deplist[sym]:
            if "kconfig:" in dep:
                kconfig_expr = dep.replace('kconfig: ', '')
                new.append(kconfig_expr)
            elif (dep == "DISABLE"):
                new.append('BACKPORT_DISABLED_KCONFIG_OPTION')
            else:
                new.append('!BACKPORT_KERNEL_%s' % dep.replace('.', '_'))
        deplist[sym] = new
    configtree.add_dependencies(deplist)
    git_debug_snapshot(args, "add kernel version dependencies")

    # disable things in makefiles that can't be selected and that the
    # build shouldn't recurse into because they don't exist -- if we
    # don't do that then a symbol from the kernel could cause the build
    # to attempt to recurse and fail
    #
    # Note that we split the regex after 50 symbols; this is because of a
    # limitation in the regex implementation (it only supports 100 nested
    # groups -- 50 seemed safer and is still fast)
    regexes = []
    for some_symbols in [
            disable_makefile[i:i + 50]
            for i in range(0, len(disable_makefile), 50)
    ]:
        r = '^([^#].*((CPTCFG|CONFIG)_(' + '|'.join([s for s in some_symbols
                                                     ]) + ')))'
        regexes.append(re.compile(r, re.MULTILINE))
    for f in maketree.get_makefiles():
        data = open(f, 'r').read()
        for r in regexes:
            data = r.sub(r'#\1', data)
        fo = open(f, 'w')
        fo.write(data)
        fo.close()
    git_debug_snapshot(args, "disable unsatisfied Makefile parts")

    if (args.kup or args.kup_test):
        upload_release(args, rel_prep, logwrite=logwrite)

    logwrite('Done!')
    return 0
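
A small worked example of the kernel-version dependency translation near the end of the function above, using an invented dependency list; it shows the Kconfig expressions that end up being handed to configtree.add_dependencies().

deplist = {
    'MAC80211': ['3.10', 'kconfig: CRYPTO'],   # invented entries
    'SOME_DRIVER': ['DISABLE'],
}

for sym in tuple(deplist.keys()):
    new = []
    for dep in deplist[sym]:
        if "kconfig:" in dep:
            # a raw Kconfig expression is passed through unchanged
            new.append(dep.replace('kconfig: ', ''))
        elif dep == "DISABLE":
            new.append('BACKPORT_DISABLED_KCONFIG_OPTION')
        else:
            # a bare kernel version maps onto a BACKPORT_KERNEL_x_y guard symbol
            new.append('!BACKPORT_KERNEL_%s' % dep.replace('.', '_'))
    deplist[sym] = new

print(deplist)
# {'MAC80211': ['!BACKPORT_KERNEL_3_10', 'CRYPTO'],
#  'SOME_DRIVER': ['BACKPORT_DISABLED_KCONFIG_OPTION']}
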
Example #4
def apply_patches(args, desc, source_dir, patch_src, target_dir, logwrite=lambda x:None):
    """
    Given a path to a directory containing patches and SmPL patches, apply
    them to the target directory. If requested, refresh the patches or
    test a specific SmPL patch.
    """
    logwrite('Applying patches from %s to %s ...' % (patch_src, target_dir))
    test_cocci = args.test_cocci or args.profile_cocci
    test_cocci_found = False
    patches = []
    sempatches = []
    for root, dirs, files in os.walk(os.path.join(source_dir, patch_src)):
        for f in files:
            if not test_cocci and f.endswith('.patch'):
                patches.append(os.path.join(root, f))
            if f.endswith('.cocci'):
                if test_cocci:
                    if f not in test_cocci:
                        continue
                    test_cocci_found = True
                    if args.test_cocci:
                        logwrite("Testing Coccinelle SmPL patch: %s" % test_cocci)
                    elif args.profile_cocci:
                        logwrite("Profiling Coccinelle SmPL patch: %s" % test_cocci)
                sempatches.append(os.path.join(root, f))
    patches.sort()
    prefix_len = len(os.path.join(source_dir, patch_src)) + 1
    for pfile in patches:
        print_name = pfile[prefix_len:]
        # read the patch file
        p = patch.fromfile(pfile)
        # complain if it's not a patch
        if not p:
            raise Exception('No patch content found in %s' % print_name)
        # leading / seems to be stripped?
        if 'dev/null' in p.items[0].source:
            raise Exception('Patches creating files are not supported (in %s)' % print_name)
        # check if the first file the patch touches exists, if so
        # assume the patch needs to be applied -- otherwise continue
        patched_file = '/'.join(p.items[0].source.split('/')[1:])
        fullfn = os.path.join(target_dir, patched_file)
        if not os.path.exists(fullfn):
            if args.verbose:
                logwrite("Not applying %s, not needed" % print_name)
            continue
        if args.verbose:
            logwrite("Applying patch %s" % print_name)

        if args.refresh:
            # but for refresh, of course look at all files the patch touches
            for patchitem in p.items:
                patched_file = '/'.join(patchitem.source.split('/')[1:])
                fullfn = os.path.join(target_dir, patched_file)
                shutil.copyfile(fullfn, fullfn + '.orig_file')

        process = subprocess.Popen(['patch', '-p1'], stdout=subprocess.PIPE,
                                   stderr=subprocess.STDOUT, stdin=subprocess.PIPE,
                                   close_fds=True, universal_newlines=True,
                                   cwd=target_dir)
        output = process.communicate(input=open(pfile, 'r').read())[0]
        output = output.split('\n')
        if output[-1] == '':
            output = output[:-1]
        if args.verbose:
            for line in output:
                logwrite('> %s' % line)
        if process.returncode != 0:
            if not args.verbose:
                logwrite("Failed to apply changes from %s" % print_name)
                for line in output:
                    logwrite('> %s' % line)
#            raise Exception('Patch failed')

        if args.refresh:
            pfilef = open(pfile + '.tmp', 'a')
            pfilef.write(p.top_header)
            pfilef.flush()
            for patchitem in p.items:
                patched_file = '/'.join(patchitem.source.split('/')[1:])
                fullfn = os.path.join(target_dir, patched_file)
                process = subprocess.Popen(['diff', '-p', '-u', patched_file + '.orig_file', patched_file,
                                            '--label', 'a/' + patched_file,
                                            '--label', 'b/' + patched_file],
                                           stdout=pfilef, close_fds=True,
                                           universal_newlines=True, cwd=target_dir)
                process.wait()
                os.unlink(fullfn + '.orig_file')
                if not process.returncode in (0, 1):
                    logwrite("Failed to diff to refresh %s" % print_name)
                    pfilef.close()
                    os.unlink(pfile + '.tmp')
                    raise Exception('Refresh failed')
            pfilef.close()
            os.rename(pfile + '.tmp', pfile)

        # remove orig/rej files that patch sometimes creates
        for root, dirs, files in os.walk(target_dir):
            for f in files:
                if f[-5:] == '.orig' or f[-4:] == '.rej':
                    os.unlink(os.path.join(root, f))
        git_debug_snapshot(args, "apply %s patch %s" % (desc, print_name))

    sempatches.sort()
    prefix_len = len(os.path.join(source_dir, patch_src)) + 1

    for cocci_file in sempatches:
        # Until Coccinelle picks this up
        pycocci = os.path.join(source_dir, 'devel/pycocci')
        cmd = [pycocci, cocci_file]
        extra_spatch_args = []
        if args.profile_cocci:
            cmd.append('--profile-cocci')
        cmd.append(os.path.abspath(target_dir))
        print_name = cocci_file[prefix_len:]
        if args.verbose:
            logwrite("Applying SmPL patch %s" % print_name)
        sprocess = subprocess.Popen(cmd,
                                    stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                                    close_fds=True, universal_newlines=True,
                                    cwd=target_dir)
        output = sprocess.communicate()[0]
        sprocess.wait()
        if sprocess.returncode != 0:
            logwrite("Failed to process SmPL patch %s" % print_name)
            raise Exception('SmPL patch failed')
        output = output.split('\n')
        if output[-1] == '':
            output = output[:-1]
        if args.verbose:
            for line in output:
                logwrite('> %s' % line)

        # remove cocci_backup files
        for root, dirs, files in os.walk(target_dir):
            for f in files:
                if f.endswith('.cocci_backup'):
                    os.unlink(os.path.join(root, f))
        git_debug_snapshot(args, "apply %s SmPL patch %s" % (desc, print_name))

    if test_cocci and test_cocci_found:
        logwrite('Done!')
        sys.exit(0)
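
Finally, a minimal sketch of the `patch -p1` subprocess handling that all of the examples above share, factored into a stand-alone helper; the helper name and file paths are hypothetical.

import subprocess

def apply_one_patch(pfile, target_dir, logwrite=print):
    # feed the patch text on stdin and merge stdout/stderr, as the examples do
    with open(pfile, 'r') as f:
        patch_text = f.read()
    proc = subprocess.Popen(['patch', '-p1'],
                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                            stdin=subprocess.PIPE, close_fds=True,
                            universal_newlines=True, cwd=target_dir)
    output = proc.communicate(input=patch_text)[0]
    for line in output.splitlines():
        logwrite('> %s' % line)
    return proc.returncode   # 0 means the patch applied cleanly

# e.g.: apply_one_patch('patches/0001-example.patch', '/path/to/output-tree')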