def get_recipe_upgrade_status(recipes=None):
    pkgs_list = []

    data_copy_list = []
    copy_vars = ('SRC_URI',
                 'PV',
                 'GITDIR',
                 'DL_DIR',
                 'PN',
                 'CACHE',
                 'PERSISTENT_DIR',
                 'BB_URI_HEADREVS',
                 'UPSTREAM_CHECK_COMMITS',
                 'UPSTREAM_CHECK_GITTAGREGEX',
                 'UPSTREAM_CHECK_REGEX',
                 'UPSTREAM_CHECK_URI',
                 'UPSTREAM_VERSION_UNKNOWN',
                 'RECIPE_MAINTAINER',
                 'RECIPE_NO_UPDATE_REASON',
                 'RECIPE_UPSTREAM_VERSION',
                 'RECIPE_UPSTREAM_DATE',
                 'CHECK_DATE',
                 )
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False)

        if not recipes:
            recipes = tinfoil.all_recipe_files(variants=False)

        for fn in recipes:
            try:
                if fn.startswith("/"):
                    data = tinfoil.parse_recipe_file(fn)
                else:
                    data = tinfoil.parse_recipe(fn)
            except bb.providers.NoProvider:
                bb.note(" No provider for %s" % fn)
                continue

            unreliable = data.getVar('UPSTREAM_CHECK_UNRELIABLE')
            if unreliable == "1":
                # Note: the original referenced an undefined 'pn' here
                bb.note(" Skip package %s as upstream check unreliable" % data.getVar('PN'))
                continue

            # Copy only the variables the upstream check needs into a plain
            # datastore, so it can be pickled across to the worker processes.
            data_copy = bb.data.init()
            for var in copy_vars:
                data_copy.setVar(var, data.getVar(var))
            for k in data:
                if k.startswith('SRCREV'):
                    data_copy.setVar(k, data.getVar(k))

            data_copy_list.append(data_copy)

    from concurrent.futures import ProcessPoolExecutor
    with ProcessPoolExecutor(max_workers=utils.cpu_count()) as executor:
        pkgs_list = executor.map(_get_recipe_upgrade_status, data_copy_list)

    return pkgs_list
def test_parse_recipe(self):
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False, quiet=2)
        testrecipe = 'mdadm'
        best = tinfoil.find_best_provider(testrecipe)
        if not best:
            self.fail('Unable to find recipe providing %s' % testrecipe)
        # best[3] is the chosen recipe's file path
        rd = tinfoil.parse_recipe_file(best[3])
        self.assertEqual(testrecipe, rd.getVar('PN'))
def test_parse_recipe_initial_datastore(self):
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False, quiet=2)
        testrecipe = 'mdadm'
        best = tinfoil.find_best_provider(testrecipe)
        if not best:
            self.fail('Unable to find recipe providing %s' % testrecipe)
        dcopy = bb.data.createCopy(tinfoil.config_data)
        dcopy.setVar('MYVARIABLE', 'somevalue')
        rd = tinfoil.parse_recipe_file(best[3], config_data=dcopy)
        # Check we can get variable values
        self.assertEqual('somevalue', rd.getVar('MYVARIABLE'))
def get_recipe_upgrade_status(recipes=None):
    pkgs_list = []

    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False)

        if not recipes:
            recipes = tinfoil.all_recipe_files(variants=False)

        for fn in recipes:
            try:
                if fn.startswith("/"):
                    data = tinfoil.parse_recipe_file(fn)
                else:
                    data = tinfoil.parse_recipe(fn)
            except bb.providers.NoProvider:
                bb.note(" No provider for %s" % fn)
                continue

            # 'pn' is needed for the skip message below, so look it up first
            # (the original referenced it before assignment).
            pn = data.getVar('PN')

            unreliable = data.getVar('UPSTREAM_CHECK_UNRELIABLE')
            if unreliable == "1":
                bb.note(" Skip package %s as upstream check unreliable" % pn)
                continue

            uv = get_recipe_upstream_version(data)

            cur_ver = uv['current_version']

            upstream_version_unknown = data.getVar('UPSTREAM_VERSION_UNKNOWN')
            if not uv['version']:
                status = "UNKNOWN" if upstream_version_unknown else "UNKNOWN_BROKEN"
            else:
                cmp = vercmp_string(uv['current_version'], uv['version'])
                if cmp == -1:
                    status = "UPDATE" if not upstream_version_unknown else "KNOWN_BROKEN"
                elif cmp == 0:
                    status = "MATCH" if not upstream_version_unknown else "KNOWN_BROKEN"
                else:
                    status = "UNKNOWN" if upstream_version_unknown else "UNKNOWN_BROKEN"

            next_ver = uv['version'] if uv['version'] else "N/A"
            revision = uv['revision'] if uv['revision'] else "N/A"
            maintainer = data.getVar('RECIPE_MAINTAINER')
            no_upgrade_reason = data.getVar('RECIPE_NO_UPDATE_REASON')

            pkgs_list.append((pn, status, cur_ver, next_ver, maintainer, revision, no_upgrade_reason))

    return pkgs_list
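# Hedged usage sketch, not part of the original source: print the rows
# returned by get_recipe_upgrade_status() above. Assumes an initialized
# BitBake/OE environment where this module's dependencies are importable;
# the column widths are arbitrary.
def print_upgrade_report(recipes=None):
    # Each entry is (pn, status, cur_ver, next_ver, maintainer, revision, no_upgrade_reason)
    for pn, status, cur_ver, next_ver, maintainer, revision, reason in get_recipe_upgrade_status(recipes):
        print('%-32s %-14s %-16s %-16s %s' % (pn, status, cur_ver, next_ver, maintainer or ''))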
def parse_recipe(config, tinfoil, pn, appends, filter_workspace=True):
    """Parse the specified recipe"""
    try:
        recipefile = tinfoil.get_recipe_file(pn)
    except bb.providers.NoProvider as e:
        logger.error(str(e))
        return None
    if appends:
        append_files = tinfoil.get_file_appends(recipefile)
        if filter_workspace:
            # Filter out appends from the workspace
            append_files = [path for path in append_files if
                            not path.startswith(config.workspace_path)]
    else:
        append_files = None
    return tinfoil.parse_recipe_file(recipefile, appends, append_files)
def test_parse_recipe_copy_expand(self):
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False, quiet=2)
        testrecipe = 'mdadm'
        best = tinfoil.find_best_provider(testrecipe)
        if not best:
            self.fail('Unable to find recipe providing %s' % testrecipe)
        rd = tinfoil.parse_recipe_file(best[3])
        # Check we can get variable values
        self.assertEqual(testrecipe, rd.getVar('PN'))
        # Check that expanding a value that includes a variable reference works
        self.assertEqual(testrecipe, rd.getVar('BPN'))
        # Now check that changing the referenced variable's value in a copy
        # gives that value when expanding
        localdata = bb.data.createCopy(rd)
        localdata.setVar('PN', 'hello')
        self.assertEqual('hello', localdata.getVar('BPN'))
def parse_recipe(config, tinfoil, pn, appends, filter_workspace=True):
    """Parse the specified recipe"""
    try:
        recipefile = tinfoil.get_recipe_file(pn)
    except bb.providers.NoProvider as e:
        logger.error(str(e))
        return None
    if appends:
        append_files = tinfoil.get_file_appends(recipefile)
        if filter_workspace:
            # Filter out appends from the workspace
            append_files = [path for path in append_files if
                            not path.startswith(config.workspace_path)]
    else:
        append_files = None
    try:
        rd = tinfoil.parse_recipe_file(recipefile, appends, append_files)
    except Exception as e:
        logger.error(str(e))
        return None
    return rd
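# Hedged usage sketch (the 'config' object and the recipe name are
# assumptions, not from the original source): parse_recipe() expects a
# prepared tinfoil instance and a config object exposing workspace_path,
# as devtool-style callers provide.
rd = parse_recipe(config, tinfoil, 'mdadm', appends=True)
if rd:
    print('%s is at version %s' % (rd.getVar('PN'), rd.getVar('PV')))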
def get_recipe_upgrade_status(recipes=None):
    pkgs_list = []

    data_copy_list = []
    copy_vars = ('SRC_URI',
                 'PV',
                 'DL_DIR',
                 'PN',
                 'CACHE',
                 'PERSISTENT_DIR',
                 'BB_URI_HEADREVS',
                 'UPSTREAM_CHECK_COMMITS',
                 'UPSTREAM_CHECK_GITTAGREGEX',
                 'UPSTREAM_CHECK_REGEX',
                 'UPSTREAM_CHECK_URI',
                 'UPSTREAM_VERSION_UNKNOWN',
                 'RECIPE_MAINTAINER',
                 'RECIPE_NO_UPDATE_REASON',
                 'RECIPE_UPSTREAM_VERSION',
                 'RECIPE_UPSTREAM_DATE',
                 'CHECK_DATE',
                 'FETCHCMD_bzr',
                 'FETCHCMD_ccrc',
                 'FETCHCMD_cvs',
                 'FETCHCMD_git',
                 'FETCHCMD_hg',
                 'FETCHCMD_npm',
                 'FETCHCMD_osc',
                 'FETCHCMD_p4',
                 'FETCHCMD_repo',
                 'FETCHCMD_s3',
                 'FETCHCMD_svn',
                 'FETCHCMD_wget',
                 )
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False)

        if not recipes:
            recipes = tinfoil.all_recipe_files(variants=False)

        for fn in recipes:
            try:
                if fn.startswith("/"):
                    data = tinfoil.parse_recipe_file(fn)
                else:
                    data = tinfoil.parse_recipe(fn)
            except bb.providers.NoProvider:
                bb.note(" No provider for %s" % fn)
                continue

            unreliable = data.getVar('UPSTREAM_CHECK_UNRELIABLE')
            if unreliable == "1":
                # Note: the original referenced an undefined 'pn' here
                bb.note(" Skip package %s as upstream check unreliable" % data.getVar('PN'))
                continue

            # Copy only the variables the upstream check needs into a plain
            # datastore, so it can be pickled across to the worker processes.
            data_copy = bb.data.init()
            for var in copy_vars:
                data_copy.setVar(var, data.getVar(var))
            for k in data:
                if k.startswith('SRCREV'):
                    data_copy.setVar(k, data.getVar(k))

            data_copy_list.append(data_copy)

    from concurrent.futures import ProcessPoolExecutor
    with ProcessPoolExecutor(max_workers=utils.cpu_count()) as executor:
        pkgs_list = executor.map(_get_recipe_upgrade_status, data_copy_list)

    return pkgs_list
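# Hedged sketch of the worker mapped above. The real _get_recipe_upgrade_status
# is defined elsewhere in the module; this version simply mirrors the per-recipe
# logic of the sequential variant shown earlier and returns the same tuple shape.
def _get_recipe_upgrade_status(data):
    uv = get_recipe_upstream_version(data)

    pn = data.getVar('PN')
    cur_ver = uv['current_version']

    upstream_version_unknown = data.getVar('UPSTREAM_VERSION_UNKNOWN')
    if not uv['version']:
        status = "UNKNOWN" if upstream_version_unknown else "UNKNOWN_BROKEN"
    else:
        cmp = vercmp_string(uv['current_version'], uv['version'])
        if cmp == -1:
            status = "UPDATE" if not upstream_version_unknown else "KNOWN_BROKEN"
        elif cmp == 0:
            status = "MATCH" if not upstream_version_unknown else "KNOWN_BROKEN"
        else:
            status = "UNKNOWN" if upstream_version_unknown else "UNKNOWN_BROKEN"

    next_ver = uv['version'] if uv['version'] else "N/A"
    revision = uv['revision'] if uv['revision'] else "N/A"
    maintainer = data.getVar('RECIPE_MAINTAINER')
    no_upgrade_reason = data.getVar('RECIPE_NO_UPDATE_REASON')

    return (pn, status, cur_ver, next_ver, maintainer, revision, no_upgrade_reason)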
def main():
    docfiles = []
    metadirs = []
    bbvars = set()
    undocumented = []
    docconf = ""
    onlydoctags = False

    # Collect and validate input
    try:
        opts, args = getopt.getopt(sys.argv[1:], "d:hm:t:T", ["help"])
    except getopt.GetoptError as err:
        print('%s' % str(err))
        usage()
        sys.exit(2)

    for o, a in opts:
        if o in ('-h', '--help'):
            usage()
            sys.exit(0)
        elif o == '-d':
            if os.path.isfile(a):
                docfiles.append(a)
            else:
                print('ERROR: documentation file %s is not a regular file' % a)
                sys.exit(3)
        elif o == '-m':
            if os.path.isdir(a):
                metadirs.append(a)
            else:
                print('ERROR: meta directory %s is not a directory' % a)
                sys.exit(4)
        elif o == "-t":
            if os.path.isfile(a):
                docconf = a
        elif o == "-T":
            onlydoctags = True
        else:
            assert False, "unhandled option"

    if len(docfiles) == 0:
        print('ERROR: no docfile specified')
        usage()
        sys.exit(5)

    if len(metadirs) == 0:
        print('ERROR: no metadir specified')
        usage()
        sys.exit(6)

    if onlydoctags and docconf == "":
        print('ERROR: no docconf specified')
        usage()
        sys.exit(7)

    prog = re.compile("^[^a-z]*$")
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False)
        parser = bb.codeparser.PythonParser('parser', None)
        datastore = tinfoil.config_data

        def bbvars_update(data):
            if prog.match(data):
                bbvars.add(data)
            if tinfoil.config_data.getVarFlag(data, 'python'):
                try:
                    parser.parse_python(tinfoil.config_data.getVar(data))
                except bb.data_smart.ExpansionError:
                    pass
                for var in parser.references:
                    if prog.match(var):
                        bbvars.add(var)
            else:
                try:
                    expandedVar = datastore.expandWithRefs(datastore.getVar(data, False), data)
                    for var in expandedVar.references:
                        if prog.match(var):
                            bbvars.add(var)
                except bb.data_smart.ExpansionError:
                    pass

        # Use tinfoil to collect all the variable names globally
        for data in datastore:
            bbvars_update(data)

        # Collect variables from all recipes
        for recipe in tinfoil.all_recipe_files():
            print("Checking %s" % recipe)
            for data in tinfoil.parse_recipe_file(recipe):
                bbvars_update(data)

    documented_vars = collect_documented_vars(docfiles)

    # Check each var for documentation
    varlen = 0
    for v in bbvars:
        if len(v) > varlen:
            varlen = len(v)
        if not bbvar_is_documented(v, documented_vars):
            undocumented.append(v)
    undocumented.sort()
    varlen = varlen + 1

    # Report all undocumented variables
    print('Found %d undocumented bb variables (out of %d):' % (len(undocumented), len(bbvars)))
    header = '%s%s' % (str("VARIABLE").ljust(varlen), str("DOCTAG").ljust(7))
    print(header)
    print(str("").ljust(len(header), '='))
    for v in undocumented:
        doctag = bbvar_doctag(v, docconf)
        if not onlydoctags or not doctag == "":
            print('%s%s' % (v.ljust(varlen), doctag))
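# Hedged invocation sketch (the script name is an assumption): the option
# handling above implies a command line along these lines, run from an
# initialized build environment. -d and -m may be repeated; -t names the doc
# tag configuration and -T restricts the report to variables carrying a doc tag.
#
#   ./bbvars.py -d documentation.conf -m meta -m meta-poky -t documentation.conf
#
if __name__ == '__main__':
    main()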
def print_deps(tinfoil, abcd_file, rn):
    try:
        info = tinfoil.get_recipe_info(rn)
    except Exception:
        print('Failed to get recipe info for: %s' % rn)
        return []
    if not info:
        print('No recipe info found for: %s' % rn)
        return []
    append_files = tinfoil.get_file_appends(info.fn)
    appends = True
    data = tinfoil.parse_recipe_file(info.fn, appends, append_files)

    src_uri = data.getVar('SRC_URI').split()
    lic = data.getVar('LICENSE')
    summary = data.getVar('SUMMARY')
    description = data.getVar('DESCRIPTION')
    homepage = data.getVar('HOMEPAGE')
    srcrev = data.getVar('SRCREV')
    branch = data.getVar('BRANCH')
    depends = data.getVar('DEPENDS').split()

    abcd_file.write('- id:\n')
    abcd_file.write('    package_manager: "Yocto"\n')
    abcd_file.write('    name: "%s"\n' % info.pn)
    abcd_file.write('    version: "%s"\n' % info.pv)
    abcd_file.write('  declared_lics:\n')
    abcd_file.write('  - "%s"\n' % lic)
    if summary:
        abcd_file.write('  description: "%s"\n' % summary)
    else:
        abcd_file.write('  description: "%s"\n' % description)
    abcd_file.write('  homepage_url: "%s"\n' % homepage)
    abcd_file.write('  source_artifact:\n')
    repos = []
    for src in src_uri:
        # Strip options.
        # TODO: ignore files with apply=false?
        src = src.split(';', maxsplit=1)[0]
        src_type = src.split('://', maxsplit=1)[0]
        if src_type == 'file':
            # TODO: Get full path of patches and other files within the source
            # repo, not just the filesystem?
            fetch = bb.fetch2.Fetch([], data)
            local = fetch.localpath(src)
            abcd_file.write('  - "%s"\n' % local)
        else:
            abcd_file.write('  - "%s"\n' % src)
            if src_type != 'http' and src_type != 'https' and src_type != 'ftp' and src_type != 'ssh':
                repos.append(src)
    if len(repos) > 1:
        print('Multiple repos not fully supported yet. Package: %s' % info.pn)
    for repo in repos:
        vcs_type, url = repo.split('://', maxsplit=1)
        abcd_file.write('  vcs:\n')
        if vcs_type == 'gitsm':
            vcs_type = 'git'
        abcd_file.write('    type: "%s"\n' % vcs_type)
        abcd_file.write('    url: "%s"\n' % url)
        # TODO: Actually support multiple repos here:
        abcd_file.write('    revision: "%s"\n' % srcrev)
        abcd_file.write('    branch: "%s"\n' % branch)
    abcd_file.write('  dependencies:\n')
    for dep in depends:
        abcd_file.write('  - "%s"\n' % dep)
    # TODO: search for transitive dependencies here? Each dependency will
    # get checked for its own dependencies sooner or later.
    return depends
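# Hedged usage sketch (output filename and root recipe are assumptions, not
# from the original source): walk the dependency tree by feeding each recipe's
# returned DEPENDS back into print_deps(), as the trailing TODO above suggests.
with bb.tinfoil.Tinfoil() as tinfoil:
    tinfoil.prepare(config_only=False)
    with open('abcd.yml', 'w') as abcd_file:
        seen = set()
        todo = ['mdadm']
        while todo:
            rn = todo.pop()
            if rn in seen:
                continue
            seen.add(rn)
            todo.extend(print_deps(tinfoil, abcd_file, rn))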