Example #1
def get_recipe_upgrade_status(recipes=None):
    """Check the upstream upgrade status of the given recipes (all recipes if
    none are specified), gathering the relevant variables via Tinfoil and then
    running the per-recipe upstream checks in parallel worker processes."""
    pkgs_list = []
    data_copy_list = []
    copy_vars = (
        'SRC_URI',
        'PV',
        'GITDIR',
        'DL_DIR',
        'PN',
        'CACHE',
        'PERSISTENT_DIR',
        'BB_URI_HEADREVS',
        'UPSTREAM_CHECK_COMMITS',
        'UPSTREAM_CHECK_GITTAGREGEX',
        'UPSTREAM_CHECK_REGEX',
        'UPSTREAM_CHECK_URI',
        'UPSTREAM_VERSION_UNKNOWN',
        'RECIPE_MAINTAINER',
        'RECIPE_NO_UPDATE_REASON',
        'RECIPE_UPSTREAM_VERSION',
        'RECIPE_UPSTREAM_DATE',
        'CHECK_DATE',
    )

    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False)

        if not recipes:
            recipes = tinfoil.all_recipe_files(variants=False)

        for fn in recipes:
            try:
                if fn.startswith("/"):
                    data = tinfoil.parse_recipe_file(fn)
                else:
                    data = tinfoil.parse_recipe(fn)
            except bb.providers.NoProvider:
                bb.note(" No provider for %s" % fn)
                continue

            unreliable = data.getVar('UPSTREAM_CHECK_UNRELIABLE')
            if unreliable == "1":
                bb.note(" Skip package %s as upstream check unreliable" % pn)
                continue

            data_copy = bb.data.init()
            for var in copy_vars:
                data_copy.setVar(var, data.getVar(var))
            for k in data:
                if k.startswith('SRCREV'):
                    data_copy.setVar(k, data.getVar(k))

            data_copy_list.append(data_copy)

    from concurrent.futures import ProcessPoolExecutor
    with ProcessPoolExecutor(max_workers=utils.cpu_count()) as executor:
        pkgs_list = executor.map(_get_recipe_upgrade_status, data_copy_list)

    return pkgs_list
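
A minimal usage sketch, assuming the function above lives in oe.recipeutils (the bare names utils and _get_recipe_upgrade_status suggest module-level imports and a per-recipe worker helper defined in the same file) and that the calling script runs with BitBake's and OE's Python libraries on sys.path:

# Hypothetical driver; the module path oe.recipeutils is an assumption based on
# the surrounding names, not something the example itself establishes.
import oe.recipeutils

def report_upgrades(recipes=None):
    # Each entry is whatever _get_recipe_upgrade_status() produced for one recipe,
    # typically a (pn, status, cur_ver, next_ver, ...) tuple.
    for result in oe.recipeutils.get_recipe_upgrade_status(recipes):
        print(result)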
Example #3
def get_recipe_upgrade_status(recipes=None):
    """Check the upstream upgrade status of the given recipes (all recipes if
    none are specified), returning a list of (pn, status, cur_ver, next_ver,
    maintainer, revision, no_upgrade_reason) tuples."""
    pkgs_list = []
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False)

        if not recipes:
            recipes = tinfoil.all_recipe_files(variants=False)

        for fn in recipes:
            try:
                if fn.startswith("/"):
                    data = tinfoil.parse_recipe_file(fn)
                else:
                    data = tinfoil.parse_recipe(fn)
            except bb.providers.NoProvider:
                bb.note(" No provider for %s" % fn)
                continue

            unreliable = data.getVar('UPSTREAM_CHECK_UNRELIABLE')
            if unreliable == "1":
                bb.note(" Skip package %s as upstream check unreliable" % pn)
                continue

            uv = get_recipe_upstream_version(data)

            pn = data.getVar('PN')
            cur_ver = uv['current_version']

            upstream_version_unknown = data.getVar('UPSTREAM_VERSION_UNKNOWN')
            if not uv['version']:
                status = "UNKNOWN" if upstream_version_unknown else "UNKNOWN_BROKEN"
            else:
                cmp = vercmp_string(uv['current_version'], uv['version'])
                if cmp == -1:
                    status = "UPDATE" if not upstream_version_unknown else "KNOWN_BROKEN"
                elif cmp == 0:
                    status = "MATCH" if not upstream_version_unknown else "KNOWN_BROKEN"
                else:
                    status = "UNKNOWN" if upstream_version_unknown else "UNKNOWN_BROKEN"

            next_ver = uv['version'] if uv['version'] else "N/A"
            revision = uv['revision'] if uv['revision'] else "N/A"
            maintainer = data.getVar('RECIPE_MAINTAINER')
            no_upgrade_reason = data.getVar('RECIPE_NO_UPDATE_REASON')

            pkgs_list.append((pn, status, cur_ver, next_ver, maintainer,
                              revision, no_upgrade_reason))

    return pkgs_list
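
The status decision above rests on vercmp_string; a small standalone sketch of that mapping (leaving out the UPSTREAM_VERSION_UNKNOWN branches), under the assumption that the bare name refers to bb.utils.vercmp_string:

from bb.utils import vercmp_string  # assumed origin of the bare name used above

def upgrade_status(current, upstream):
    # Mirrors the comparison used above, minus the UPSTREAM_VERSION_UNKNOWN cases.
    cmp = vercmp_string(current, upstream)
    if cmp == -1:
        return "UPDATE"          # upstream is newer than the recipe
    elif cmp == 0:
        return "MATCH"           # recipe already tracks the latest upstream
    return "UNKNOWN_BROKEN"      # recipe version appears newer than upstream

print(upgrade_status("1.2.3", "1.3.0"))  # UPDATE
print(upgrade_status("2.0", "2.0"))      # MATCH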
Example #5
def main():
    """Collect BitBake variable names from the configuration data and all
    recipes, then report any that are not covered by the given documentation
    files."""
    docfiles = []
    metadirs = []
    bbvars = set()
    undocumented = []
    docconf = ""
    onlydoctags = False

    # Collect and validate input
    try:
        opts, args = getopt.getopt(sys.argv[1:], "d:hm:t:T", ["help"])
    except getopt.GetoptError as err:
        print('%s' % str(err))
        usage()
        sys.exit(2)

    for o, a in opts:
        if o in ('-h', '--help'):
            usage()
            sys.exit(0)
        elif o == '-d':
            if os.path.isfile(a):
                docfiles.append(a)
            else:
                print('ERROR: documentation file %s is not a regular file' % a)
                sys.exit(3)
        elif o == '-m':
            if os.path.isdir(a):
                metadirs.append(a)
            else:
                print('ERROR: meta directory %s is not a directory' % a)
                sys.exit(4)
        elif o == "-t":
            if os.path.isfile(a):
                docconf = a
        elif o == "-T":
            onlydoctags = True
        else:
            assert False, "unhandled option"

    if len(docfiles) == 0:
        print('ERROR: no docfile specified')
        usage()
        sys.exit(5)

    if len(metadirs) == 0:
        print('ERROR: no metadir specified')
        usage()
        sys.exit(6)

    if onlydoctags and docconf == "":
        print('ERROR: no docconf specified')
        usage()
        sys.exit(7)

    prog = re.compile("^[^a-z]*$")
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False)
        parser = bb.codeparser.PythonParser('parser', None)
        datastore = tinfoil.config_data

        def bbvars_update(data):
            if prog.match(data):
                bbvars.add(data)
            if tinfoil.config_data.getVarFlag(data, 'python'):
                try:
                    parser.parse_python(tinfoil.config_data.getVar(data))
                except bb.data_smart.ExpansionError:
                    pass
                for var in parser.references:
                    if prog.match(var):
                        bbvars.add(var)
            else:
                try:
                    expandedVar = datastore.expandWithRefs(datastore.getVar(data, False), data)
                    for var in expandedVar.references:
                        if prog.match(var):
                            bbvars.add(var)
                except bb.data_smart.ExpansionError:
                    pass

        # Use tinfoil to collect all the variable names globally
        for data in datastore:
            bbvars_update(data)

        # Collect variables from all recipes
        for recipe in tinfoil.all_recipe_files():
            print("Checking %s" % recipe)
            for data in tinfoil.parse_recipe_file(recipe):
                bbvars_update(data)

    documented_vars = collect_documented_vars(docfiles)

    # Check each var for documentation
    varlen = 0
    for v in bbvars:
        if len(v) > varlen:
            varlen = len(v)
        if not bbvar_is_documented(v, documented_vars):
            undocumented.append(v)
    undocumented.sort()
    varlen = varlen + 1

    # Report all undocumented variables
    print('Found %d undocumented bb variables (out of %d):' % (len(undocumented), len(bbvars)))
    header = '%s%s' % (str("VARIABLE").ljust(varlen), str("DOCTAG").ljust(7))
    print(header)
    print(str("").ljust(len(header), '='))
    for v in undocumented:
        doctag = bbvar_doctag(v, docconf)
        if not onlydoctags or not doctag == "":
            print('%s%s' % (v.ljust(varlen), doctag))
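
This main() is plainly the body of a standalone script; a sketch of the module-level scaffolding it presupposes follows. The helpers usage(), collect_documented_vars(), bbvar_is_documented() and bbvar_doctag() are called above but defined elsewhere in that script, so only the implied imports and entry point are shown:

# Imports implied by the calls made in main(); the script must also make
# BitBake's lib/ directory importable (e.g. via PYTHONPATH or a sys.path
# insert) before the bb.* modules can be loaded.
import getopt
import os
import re
import sys

import bb.codeparser
import bb.data_smart
import bb.tinfoil

if __name__ == "__main__":
    main()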