def test_expand(self):
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(True)
        expr = '${@os.getpid()}'
        pid = tinfoil.config_data.expand(expr)
        if not pid:
            self.fail('Unable to expand "%s" - returned %s' % (expr, pid))

def test_datastore_operations(self):
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=True)
        # Test setVarFlag() / getVarFlag()
        tinfoil.config_data.setVarFlag('TESTVAR', 'flagname', 'flagval')
        value = tinfoil.config_data.getVarFlag('TESTVAR', 'flagname')
        self.assertEqual(value, 'flagval', 'Value set using config_data.setVarFlag() is not reflected in config_data.getVarFlag()')
        # Test delVarFlag()
        tinfoil.config_data.setVarFlag('TESTVAR', 'otherflag', 'othervalue')
        tinfoil.config_data.delVarFlag('TESTVAR', 'flagname')
        value = tinfoil.config_data.getVarFlag('TESTVAR', 'flagname')
        self.assertEqual(value, None, 'Varflag deleted using config_data.delVarFlag() is not reflected in config_data.getVarFlag()')
        value = tinfoil.config_data.getVarFlag('TESTVAR', 'otherflag')
        self.assertEqual(value, 'othervalue', 'Varflag deleted using config_data.delVarFlag() caused unrelated flag to be removed')
        # Test delVar()
        tinfoil.config_data.setVar('TESTVAR', 'varvalue')
        value = tinfoil.config_data.getVar('TESTVAR')
        self.assertEqual(value, 'varvalue', 'Value set using config_data.setVar() is not reflected in config_data.getVar()')
        tinfoil.config_data.delVar('TESTVAR')
        value = tinfoil.config_data.getVar('TESTVAR')
        self.assertEqual(value, None, 'Variable deleted using config_data.delVar() appears to still have a value')
        # Test renameVar()
        tinfoil.config_data.setVar('TESTVAROLD', 'origvalue')
        tinfoil.config_data.renameVar('TESTVAROLD', 'TESTVARNEW')
        value = tinfoil.config_data.getVar('TESTVAROLD')
        self.assertEqual(value, None, 'Variable renamed using config_data.renameVar() still seems to exist')
        value = tinfoil.config_data.getVar('TESTVARNEW')
        self.assertEqual(value, 'origvalue', 'Variable renamed using config_data.renameVar() does not appear with new name')
        # Test overrides
        tinfoil.config_data.setVar('TESTVAR', 'original')
        tinfoil.config_data.setVar('TESTVAR_overrideone', 'one')
        tinfoil.config_data.setVar('TESTVAR_overridetwo', 'two')
        tinfoil.config_data.appendVar('OVERRIDES', ':overrideone')
        value = tinfoil.config_data.getVar('TESTVAR')
        self.assertEqual(value, 'one', 'Variable overrides not functioning correctly')

def test_list_recipes(self):
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False, quiet=2)
        # Check pkg_pn
        checkpns = ['tar', 'automake', 'coreutils', 'm4-native', 'nativesdk-gcc']
        pkg_pn = tinfoil.cooker.recipecaches[''].pkg_pn
        for pn in checkpns:
            self.assertIn(pn, pkg_pn)
        # Check pkg_fn
        checkfns = {'nativesdk-gcc': '^virtual:nativesdk:.*',
                    'coreutils': '.*/coreutils_.*.bb'}
        for fn, pn in tinfoil.cooker.recipecaches[''].pkg_fn.items():
            if pn in checkpns:
                if pn in checkfns:
                    self.assertTrue(re.match(checkfns[pn], fn), 'Entry for %s: %s did not match %s' % (pn, fn, checkfns[pn]))
                checkpns.remove(pn)
        if checkpns:
            self.fail('Unable to find pkg_fn entries for: %s' % ', '.join(checkpns))

def test_getvar(self):
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(True)
        machine = tinfoil.config_data.getVar('MACHINE')
        if not machine:
            self.fail('Unable to get MACHINE value - returned %s' % machine)

def setup_tinfoil(config_only=False, basepath=None, tracking=False):
    """Initialize tinfoil api from bitbake"""
    import scriptpath
    orig_cwd = os.path.abspath(os.curdir)
    try:
        if basepath:
            os.chdir(basepath)
        bitbakepath = scriptpath.add_bitbake_lib_path()
        if not bitbakepath:
            logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
            sys.exit(1)

        import bb.tinfoil
        tinfoil = bb.tinfoil.Tinfoil(tracking=tracking)
        try:
            tinfoil.prepare(config_only)
            tinfoil.logger.setLevel(logger.getEffectiveLevel())
        except bb.tinfoil.TinfoilUIException:
            tinfoil.shutdown()
            raise DevtoolError('Failed to start bitbake environment')
        except:
            tinfoil.shutdown()
            raise
    finally:
        os.chdir(orig_cwd)
    return tinfoil

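# A minimal usage sketch for setup_tinfoil() above (illustrative, not part of the
# original script): the helper returns a live Tinfoil instance, so the caller is
# responsible for shutting it down when done.
tinfoil = setup_tinfoil(config_only=True, basepath=None)
try:
    machine = tinfoil.config_data.getVar('MACHINE')
    print('MACHINE = %s' % machine)
finally:
    tinfoil.shutdown()
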
def test_variable_history(self):
    # Basic test to ensure that variable history works when tracking=True
    with bb.tinfoil.Tinfoil(tracking=True) as tinfoil:
        tinfoil.prepare(config_only=False, quiet=2)
        # Note that _tracking for any datastore we get will be
        # false here, that's currently expected - so we can't check
        # for that
        history = tinfoil.config_data.varhistory.variable('DL_DIR')
        for entry in history:
            if entry['file'].endswith('/bitbake.conf'):
                if entry['op'] in ['set', 'set?']:
                    break
        else:
            self.fail('Did not find history entry setting DL_DIR in bitbake.conf. History: %s' % history)
        # Check it works for recipes as well
        testrecipe = 'zlib'
        rd = tinfoil.parse_recipe(testrecipe)
        history = rd.varhistory.variable('LICENSE')
        bbfound = -1
        recipefound = -1
        for i, entry in enumerate(history):
            if entry['file'].endswith('/bitbake.conf'):
                if entry['detail'] == 'INVALID' and entry['op'] in ['set', 'set?']:
                    bbfound = i
            elif entry['file'].endswith('.bb'):
                if entry['op'] == 'set':
                    recipefound = i
        if bbfound == -1:
            self.fail('Did not find history entry setting LICENSE in bitbake.conf parsing %s recipe. History: %s' % (testrecipe, history))
        if recipefound == -1:
            self.fail('Did not find history entry setting LICENSE in %s recipe. History: %s' % (testrecipe, history))
        if bbfound > recipefound:
            self.fail('History entry setting LICENSE in %s recipe and in bitbake.conf in wrong order. History: %s' % (testrecipe, history))

def test_datastore_operations(self):
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=True)
        # Test setVarFlag() / getVarFlag()
        tinfoil.config_data.setVarFlag('TESTVAR', 'flagname', 'flagval')
        value = tinfoil.config_data.getVarFlag('TESTVAR', 'flagname')
        self.assertEqual(value, 'flagval', 'Value set using config_data.setVarFlag() is not reflected in config_data.getVarFlag()')
        # Test delVarFlag()
        tinfoil.config_data.setVarFlag('TESTVAR', 'otherflag', 'othervalue')
        tinfoil.config_data.delVarFlag('TESTVAR', 'flagname')
        value = tinfoil.config_data.getVarFlag('TESTVAR', 'flagname')
        self.assertEqual(value, None, 'Varflag deleted using config_data.delVarFlag() is not reflected in config_data.getVarFlag()')
        value = tinfoil.config_data.getVarFlag('TESTVAR', 'otherflag')
        self.assertEqual(value, 'othervalue', 'Varflag deleted using config_data.delVarFlag() caused unrelated flag to be removed')
        # Test delVar()
        tinfoil.config_data.setVar('TESTVAR', 'varvalue')
        value = tinfoil.config_data.getVar('TESTVAR')
        self.assertEqual(value, 'varvalue', 'Value set using config_data.setVar() is not reflected in config_data.getVar()')
        tinfoil.config_data.delVar('TESTVAR')
        value = tinfoil.config_data.getVar('TESTVAR')
        self.assertEqual(value, None, 'Variable deleted using config_data.delVar() appears to still have a value')
        # Test renameVar()
        tinfoil.config_data.setVar('TESTVAROLD', 'origvalue')
        tinfoil.config_data.renameVar('TESTVAROLD', 'TESTVARNEW')
        value = tinfoil.config_data.getVar('TESTVAROLD')
        self.assertEqual(value, None, 'Variable renamed using config_data.renameVar() still seems to exist')
        value = tinfoil.config_data.getVar('TESTVARNEW')
        self.assertEqual(value, 'origvalue', 'Variable renamed using config_data.renameVar() does not appear with new name')
        # Test overrides
        tinfoil.config_data.setVar('TESTVAR', 'original')
        tinfoil.config_data.setVar('TESTVAR:overrideone', 'one')
        tinfoil.config_data.setVar('TESTVAR:overridetwo', 'two')
        tinfoil.config_data.appendVar('OVERRIDES', ':overrideone')
        value = tinfoil.config_data.getVar('TESTVAR')
        self.assertEqual(value, 'one', 'Variable overrides not functioning correctly')

def get_depgraph(targets=['world'], failsafe=False):
    '''
    Returns the dependency graph for the given target(s).
    The dependency graph is taken directly from DepTreeEvent.
    '''
    depgraph = None
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False)
        tinfoil.set_event_mask(['bb.event.NoProvider', 'bb.event.DepTreeGenerated', 'bb.command.CommandCompleted'])
        if not tinfoil.run_command('generateDepTreeEvent', targets, 'do_build'):
            raise RuntimeError('starting generateDepTreeEvent failed')
        while True:
            event = tinfoil.wait_event(timeout=1000)
            if event:
                if isinstance(event, bb.command.CommandFailed):
                    raise RuntimeError('Generating dependency information failed: %s' % event.error)
                elif isinstance(event, bb.command.CommandCompleted):
                    break
                elif isinstance(event, bb.event.NoProvider):
                    if failsafe:
                        # The event is informational, we will get information about the
                        # remaining dependencies eventually and thus can ignore this
                        # here like we do in get_signatures(), if desired.
                        continue
                    if event._reasons:
                        raise RuntimeError('Nothing provides %s: %s' % (event._item, event._reasons))
                    else:
                        raise RuntimeError('Nothing provides %s.' % (event._item))
                elif isinstance(event, bb.event.DepTreeGenerated):
                    depgraph = event._depgraph

    if depgraph is None:
        raise RuntimeError('Could not retrieve the depgraph.')
    return depgraph

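# Illustrative caller for get_depgraph() above. The returned dict is whatever
# bb.event.DepTreeGenerated carries; the 'depends' key (recipe -> list of
# build-time dependencies) and the 'core-image-minimal' target are assumptions
# made for this sketch only.
depgraph = get_depgraph(targets=['core-image-minimal'])
for pn, deps in sorted(depgraph.get('depends', {}).items()):
    print('%s -> %s' % (pn, ', '.join(sorted(deps))))
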
def test_wait_event(self):
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=True)
        # Need to drain events otherwise events that will be masked will still be in the queue
        while tinfoil.wait_event(0.25):
            pass
        tinfoil.set_event_mask(['bb.event.FilesMatchingFound', 'bb.command.CommandCompleted'])
        pattern = 'conf'
        res = tinfoil.run_command('findFilesMatchingInDir', pattern, 'conf/machine')
        self.assertTrue(res)

        eventreceived = False
        waitcount = 5
        while waitcount > 0:
            event = tinfoil.wait_event(1)
            if event:
                if isinstance(event, bb.command.CommandCompleted):
                    break
                elif isinstance(event, bb.event.FilesMatchingFound):
                    self.assertEqual(pattern, event._pattern)
                    self.assertIn('qemuarm.conf', event._matches)
                    eventreceived = True
                else:
                    self.fail('Unexpected event: %s' % event)
            waitcount = waitcount - 1

        self.assertNotEqual(waitcount, 0, 'Timed out waiting for CommandCompleted event from bitbake server')
        self.assertTrue(eventreceived, 'Did not receive FilesMatchingFound event from bitbake server')

def test_getvar_bb_origenv(self):
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(True)
        origenv = tinfoil.config_data.getVar('BB_ORIGENV', False)
        if not origenv:
            self.fail('Unable to get BB_ORIGENV value - returned %s' % origenv)
        self.assertEqual(origenv.getVar('HOME', False), os.environ['HOME'])

def setup_tinfoil(config_only=False, basepath=None, tracking=False):
    """Initialize tinfoil api from bitbake"""
    import scriptpath
    orig_cwd = os.path.abspath(os.curdir)
    try:
        if basepath:
            os.chdir(basepath)
        bitbakepath = scriptpath.add_bitbake_lib_path()
        if not bitbakepath:
            logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
            sys.exit(1)

        import bb.tinfoil
        tinfoil = bb.tinfoil.Tinfoil(tracking=tracking)
        try:
            tinfoil.logger.setLevel(logger.getEffectiveLevel())
            tinfoil.prepare(config_only)
        except bb.tinfoil.TinfoilUIException:
            tinfoil.shutdown()
            raise DevtoolError('Failed to start bitbake environment')
        except:
            tinfoil.shutdown()
            raise
    finally:
        os.chdir(orig_cwd)
    return tinfoil

def test_wait_event(self):
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=True)

        tinfoil.set_event_mask(['bb.event.FilesMatchingFound', 'bb.command.CommandCompleted'])

        # Need to drain events otherwise events that were masked may still be in the queue
        while tinfoil.wait_event():
            pass

        pattern = 'conf'
        res = tinfoil.run_command('findFilesMatchingInDir', pattern, 'conf/machine')
        self.assertTrue(res)

        eventreceived = False
        commandcomplete = False
        start = time.time()
        # Wait for 5s in total so we'd detect spurious heartbeat events for example
        while time.time() - start < 5:
            event = tinfoil.wait_event(1)
            if event:
                if isinstance(event, bb.command.CommandCompleted):
                    commandcomplete = True
                elif isinstance(event, bb.event.FilesMatchingFound):
                    self.assertEqual(pattern, event._pattern)
                    self.assertIn('qemuarm.conf', event._matches)
                    eventreceived = True
                elif isinstance(event, logging.LogRecord):
                    continue
                else:
                    self.fail('Unexpected event: %s' % event)

        self.assertTrue(commandcomplete, 'Timed out waiting for CommandCompleted event from bitbake server')
        self.assertTrue(eventreceived, 'Did not receive FilesMatchingFound event from bitbake server')

def runqemu(pn, ssh=True, runqemuparams='', image_fstype=None, launch_cmd=None, qemuparams=None, overrides={}, discard_writes=True):
    """
    launch_cmd means directly run the command, don't need set rootfs or env vars.
    """
    import bb.tinfoil
    import bb.build
    # Need a non-'BitBake' logger to capture the runner output
    targetlogger = logging.getLogger('TargetRunner')
    targetlogger.setLevel(logging.DEBUG)
    handler = logging.StreamHandler(sys.stdout)
    targetlogger.addHandler(handler)

    tinfoil = bb.tinfoil.Tinfoil()
    tinfoil.prepare(config_only=False, quiet=True)
    try:
        tinfoil.logger.setLevel(logging.WARNING)
        import oeqa.targetcontrol
        tinfoil.config_data.setVar("TEST_LOG_DIR", "${WORKDIR}/testimage")
        tinfoil.config_data.setVar("TEST_QEMUBOOT_TIMEOUT", "1000")
        # Tell QemuTarget() whether need find rootfs/kernel or not
        if launch_cmd:
            tinfoil.config_data.setVar("FIND_ROOTFS", '0')
        else:
            tinfoil.config_data.setVar("FIND_ROOTFS", '1')
        recipedata = tinfoil.parse_recipe(pn)
        for key, value in overrides.items():
            recipedata.setVar(key, value)
        logdir = recipedata.getVar("TEST_LOG_DIR")
        qemu = oeqa.targetcontrol.QemuTarget(recipedata, targetlogger, image_fstype)
    finally:
        # We need to shut down tinfoil early here in case we actually want
        # to run tinfoil-using utilities with the running QEMU instance.
        # Luckily QemuTarget doesn't need it after the constructor.
        tinfoil.shutdown()

    try:
        qemu.deploy()
        try:
            qemu.start(params=qemuparams, ssh=ssh, runqemuparams=runqemuparams, launch_cmd=launch_cmd, discard_writes=discard_writes)
        except bb.build.FuncFailed:
            msg = 'Failed to start QEMU - see the logs in %s' % logdir
            if os.path.exists(qemu.qemurunnerlog):
                with open(qemu.qemurunnerlog, 'r') as f:
                    msg = msg + "Qemurunner log output from %s:\n%s" % (qemu.qemurunnerlog, f.read())
            raise Exception(msg)

        yield qemu
    finally:
        targetlogger.removeHandler(handler)
        try:
            qemu.stop()
        except:
            pass

def get_recipe_upgrade_status(recipes=None):
    pkgs_list = []
    data_copy_list = []
    copy_vars = ('SRC_URI',
                 'PV',
                 'GITDIR',
                 'DL_DIR',
                 'PN',
                 'CACHE',
                 'PERSISTENT_DIR',
                 'BB_URI_HEADREVS',
                 'UPSTREAM_CHECK_COMMITS',
                 'UPSTREAM_CHECK_GITTAGREGEX',
                 'UPSTREAM_CHECK_REGEX',
                 'UPSTREAM_CHECK_URI',
                 'UPSTREAM_VERSION_UNKNOWN',
                 'RECIPE_MAINTAINER',
                 'RECIPE_NO_UPDATE_REASON',
                 'RECIPE_UPSTREAM_VERSION',
                 'RECIPE_UPSTREAM_DATE',
                 'CHECK_DATE',
                 )

    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False)

        if not recipes:
            recipes = tinfoil.all_recipe_files(variants=False)

        for fn in recipes:
            try:
                if fn.startswith("/"):
                    data = tinfoil.parse_recipe_file(fn)
                else:
                    data = tinfoil.parse_recipe(fn)
            except bb.providers.NoProvider:
                bb.note(" No provider for %s" % fn)
                continue

            unreliable = data.getVar('UPSTREAM_CHECK_UNRELIABLE')
            if unreliable == "1":
                # Use the parsed PN here; a bare 'pn' is not defined at this point
                bb.note(" Skip package %s as upstream check unreliable" % data.getVar('PN'))
                continue

            data_copy = bb.data.init()
            for var in copy_vars:
                data_copy.setVar(var, data.getVar(var))
            for k in data:
                if k.startswith('SRCREV'):
                    data_copy.setVar(k, data.getVar(k))

            data_copy_list.append(data_copy)

    from concurrent.futures import ProcessPoolExecutor
    with ProcessPoolExecutor(max_workers=utils.cpu_count()) as executor:
        pkgs_list = executor.map(_get_recipe_upgrade_status, data_copy_list)

    return pkgs_list

def get_recipe_upgrade_status(recipes=None):
    pkgs_list = []
    data_copy_list = []
    copy_vars = ('SRC_URI', 'PV', 'GITDIR', 'DL_DIR', 'PN', 'CACHE', 'PERSISTENT_DIR',
                 'BB_URI_HEADREVS', 'UPSTREAM_CHECK_COMMITS', 'UPSTREAM_CHECK_GITTAGREGEX',
                 'UPSTREAM_CHECK_REGEX', 'UPSTREAM_CHECK_URI', 'UPSTREAM_VERSION_UNKNOWN',
                 'RECIPE_MAINTAINER', 'RECIPE_NO_UPDATE_REASON', 'RECIPE_UPSTREAM_VERSION',
                 'RECIPE_UPSTREAM_DATE', 'CHECK_DATE',
                 )

    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False)

        if not recipes:
            recipes = tinfoil.all_recipe_files(variants=False)

        for fn in recipes:
            try:
                if fn.startswith("/"):
                    data = tinfoil.parse_recipe_file(fn)
                else:
                    data = tinfoil.parse_recipe(fn)
            except bb.providers.NoProvider:
                bb.note(" No provider for %s" % fn)
                continue

            unreliable = data.getVar('UPSTREAM_CHECK_UNRELIABLE')
            if unreliable == "1":
                # Use the parsed PN here; a bare 'pn' is not defined at this point
                bb.note(" Skip package %s as upstream check unreliable" % data.getVar('PN'))
                continue

            data_copy = bb.data.init()
            for var in copy_vars:
                data_copy.setVar(var, data.getVar(var))
            for k in data:
                if k.startswith('SRCREV'):
                    data_copy.setVar(k, data.getVar(k))

            data_copy_list.append(data_copy)

    from concurrent.futures import ProcessPoolExecutor
    with ProcessPoolExecutor(max_workers=utils.cpu_count()) as executor:
        pkgs_list = executor.map(_get_recipe_upgrade_status, data_copy_list)

    return pkgs_list

def test_parse_recipe(self):
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False, quiet=2)
        testrecipe = 'mdadm'
        best = tinfoil.find_best_provider(testrecipe)
        if not best:
            self.fail('Unable to find recipe providing %s' % testrecipe)
        rd = tinfoil.parse_recipe_file(best[3])
        self.assertEqual(testrecipe, rd.getVar('PN'))

def runqemu(pn, ssh=True):
    import bb.tinfoil
    import bb.build

    tinfoil = bb.tinfoil.Tinfoil()
    tinfoil.prepare(False)
    try:
        tinfoil.logger.setLevel(logging.WARNING)
        import oeqa.targetcontrol
        tinfoil.config_data.setVar("TEST_LOG_DIR", "${WORKDIR}/testimage")
        tinfoil.config_data.setVar("TEST_QEMUBOOT_TIMEOUT", "1000")
        import oe.recipeutils
        recipefile = oe.recipeutils.pn_to_recipe(tinfoil.cooker, pn)
        recipedata = oe.recipeutils.parse_recipe(recipefile, [], tinfoil.config_data)

        # The QemuRunner log is saved out, but we need to ensure it is at the right
        # log level (and then ensure that since it's a child of the BitBake logger,
        # we disable propagation so we don't then see the log events on the console)
        logger = logging.getLogger("BitBake.QemuRunner")
        logger.setLevel(logging.DEBUG)
        logger.propagate = False
        logdir = recipedata.getVar("TEST_LOG_DIR", True)

        qemu = oeqa.targetcontrol.QemuTarget(recipedata)
    finally:
        # We need to shut down tinfoil early here in case we actually want
        # to run tinfoil-using utilities with the running QEMU instance.
        # Luckily QemuTarget doesn't need it after the constructor.
        tinfoil.shutdown()

    # Setup bitbake logger as console handler is removed by tinfoil.shutdown
    bblogger = logging.getLogger("BitBake")
    bblogger.setLevel(logging.INFO)
    console = logging.StreamHandler(sys.stdout)
    bbformat = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
    if sys.stdout.isatty():
        bbformat.enable_color()
    console.setFormatter(bbformat)
    bblogger.addHandler(console)

    try:
        qemu.deploy()
        try:
            qemu.start(ssh=ssh)
        except bb.build.FuncFailed:
            raise Exception("Failed to start QEMU - see the logs in %s" % logdir)

        yield qemu
    finally:
        try:
            qemu.stop()
        except:
            pass

def runqemu(pn, ssh=True):
    import bb.tinfoil
    import bb.build

    tinfoil = bb.tinfoil.Tinfoil()
    tinfoil.prepare(False)
    try:
        tinfoil.logger.setLevel(logging.WARNING)
        import oeqa.targetcontrol
        tinfoil.config_data.setVar("TEST_LOG_DIR", "${WORKDIR}/testimage")
        tinfoil.config_data.setVar("TEST_QEMUBOOT_TIMEOUT", "1000")
        import oe.recipeutils
        recipefile = oe.recipeutils.pn_to_recipe(tinfoil.cooker, pn)
        recipedata = oe.recipeutils.parse_recipe(recipefile, [], tinfoil.config_data)

        # The QemuRunner log is saved out, but we need to ensure it is at the right
        # log level (and then ensure that since it's a child of the BitBake logger,
        # we disable propagation so we don't then see the log events on the console)
        logger = logging.getLogger('BitBake.QemuRunner')
        logger.setLevel(logging.DEBUG)
        logger.propagate = False
        logdir = recipedata.getVar("TEST_LOG_DIR", True)

        qemu = oeqa.targetcontrol.QemuTarget(recipedata)
    finally:
        # We need to shut down tinfoil early here in case we actually want
        # to run tinfoil-using utilities with the running QEMU instance.
        # Luckily QemuTarget doesn't need it after the constructor.
        tinfoil.shutdown()

    # Setup bitbake logger as console handler is removed by tinfoil.shutdown
    bblogger = logging.getLogger('BitBake')
    bblogger.setLevel(logging.INFO)
    console = logging.StreamHandler(sys.stdout)
    bbformat = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
    if sys.stdout.isatty():
        bbformat.enable_color()
    console.setFormatter(bbformat)
    bblogger.addHandler(console)

    try:
        qemu.deploy()
        try:
            qemu.start(ssh=ssh)
        except bb.build.FuncFailed:
            raise Exception('Failed to start QEMU - see the logs in %s' % logdir)

        yield qemu
    finally:
        try:
            qemu.stop()
        except:
            pass

def init_tinfoil(config_only=False, tracking=False):
    """Initialize the Bitbake tinfoil module"""
    import bb.tinfoil
    try:
        tinfoil = bb.tinfoil.Tinfoil(tracking=tracking)
    except (SystemExit, bb.BBHandledException):
        raise GbpError("Failed to initialize tinfoil")
    tinfoil.prepare(config_only=config_only)
    return tinfoil

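# Brief usage sketch for init_tinfoil() above (illustrative only): prepare a
# config-only server, read one variable, and make sure shutdown() always runs.
tinfoil = init_tinfoil(config_only=True)
try:
    dl_dir = tinfoil.config_data.getVar('DL_DIR')
    print('DL_DIR = %s' % dl_dir)
finally:
    tinfoil.shutdown()
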
def main():
    parser = ArgumentParser(description='Find all dependencies of a recipe.')
    parser.add_argument('recipe', metavar='recipe', help='a recipe to investigate')
    args = parser.parse_args()
    rn = args.recipe

    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare()

        # These are the packages that bitbake assumes are provided by the host
        # system. They do not have recipes, so searching tinfoil for them will
        # not work. Anyway, by nature they are only build tools and will not be
        # distributed in an image.
        assume_provided = tinfoil.config_data.getVar('ASSUME_PROVIDED').split()
        if SKIP_BUILD_TOOLS:
            assume_provided.extend(KNOWN_BUILD_TOOLS)

        data = get_recipe_info(tinfoil, rn)
        if not data:
            print('Nothing to do!')
            return

        with open(rn + '-dependencies.yml', "w") as manifest_file:
            manifest_file.write('project:\n')
            data.depends = []
            depends = data.getVar('DEPENDS').split()
            for dep in depends:
                if dep not in assume_provided:
                    data.depends.append(dep)
            print_package(manifest_file, data, is_project=True)
            manifest_file.write(' scopes:\n')
            manifest_file.write(' - name: "all"\n')
            manifest_file.write(' delivered: true\n')
            if not data.depends:
                manifest_file.write(' dependencies: []\n')
            else:
                manifest_file.write(' dependencies:\n')

            recipe_info = dict([(rn, data)])
            packages = []
            find_dependencies(manifest_file, tinfoil, assume_provided, recipe_info, packages, rn, order=1)

            manifest_file.write('packages:\n')
            # Iterate through the list of packages found to print out their full
            # information. Skip the initial recipe since we already printed it out.
            for p in packages:
                if p is not rn:
                    data = recipe_info[p]
                    print_package(manifest_file, data, is_project=False)

def test_parse_recipe_initial_datastore(self):
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False, quiet=2)
        testrecipe = 'mdadm'
        best = tinfoil.find_best_provider(testrecipe)
        if not best:
            self.fail('Unable to find recipe providing %s' % testrecipe)
        dcopy = bb.data.createCopy(tinfoil.config_data)
        dcopy.setVar('MYVARIABLE', 'somevalue')
        rd = tinfoil.parse_recipe_file(best[3], config_data=dcopy)
        # Check we can get variable values
        self.assertEqual('somevalue', rd.getVar('MYVARIABLE'))

def harvest_data(machines, recipes):
    import bb.tinfoil
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=True)
        corepath = layer_path("core", tinfoil.config_data)
        sys.path.append(os.path.join(corepath, "lib"))
    import oe.recipeutils
    import oe.patch

    # Queue of recipes that we're still looking for upstream releases for
    to_check = list(recipes)
    # Upstream releases
    upstreams = {}
    # Machines to recipes to versions
    versions = {}

    for machine in machines:
        print(f"Gathering data for {machine}...")
        os.environ["MACHINE"] = machine
        with bb.tinfoil.Tinfoil() as tinfoil:
            versions[machine] = {}
            tinfoil.prepare(quiet=2)
            for recipe in recipes:
                try:
                    d = tinfoil.parse_recipe(recipe)
                except bb.providers.NoProvider:
                    continue

                if recipe in to_check:
                    try:
                        info = oe.recipeutils.get_recipe_upstream_version(d)
                        upstreams[recipe] = info["version"]
                        to_check.remove(recipe)
                    except (bb.providers.NoProvider, KeyError):
                        pass

                details = versions[machine][recipe] = {}
                details["recipe"] = d.getVar("PN")
                details["version"] = trim_pv(d.getVar("PV"))
                details["fullversion"] = d.getVar("PV")
                details["patches"] = [extract_patch_info(p, d) for p in oe.patch.src_patches(d)]
                details["patched"] = bool(details["patches"])
                details["patches_safe"] = safe_patches(details["patches"])

    # Now backfill the upstream versions
    for machine in versions:
        for recipe in versions[machine]:
            data = versions[machine][recipe]
            data["upstream"] = upstreams[recipe]
            data["needs_update"] = needs_update(data["version"], data["upstream"])

    return upstreams, versions

def setup_tinfoil(config_only=False):
    """Initialize tinfoil api from bitbake"""
    import scriptpath
    bitbakepath = scriptpath.add_bitbake_lib_path()
    if not bitbakepath:
        logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
        sys.exit(1)

    import bb.tinfoil
    tinfoil = bb.tinfoil.Tinfoil()
    tinfoil.prepare(config_only)
    tinfoil.logger.setLevel(logger.getEffectiveLevel())
    return tinfoil

def setup_tinfoil():
    """Initialize tinfoil api from bitbake"""
    import scriptpath
    bitbakepath = scriptpath.add_bitbake_lib_path()
    if not bitbakepath:
        logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
        sys.exit(1)

    import bb.tinfoil
    tinfoil = bb.tinfoil.Tinfoil()
    tinfoil.prepare(False)
    tinfoil.logger.setLevel(logger.getEffectiveLevel())
    return tinfoil

def setup_tinfoil():
    import scriptpath
    bitbakepath = scriptpath.add_bitbake_lib_path()
    if not bitbakepath:
        logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
        sys.exit(1)

    import bb.tinfoil
    import logging
    tinfoil = bb.tinfoil.Tinfoil()
    tinfoil.prepare(False)
    tinfoil.logger.setLevel(logging.WARNING)
    return tinfoil

def get_recipe_upgrade_status(recipes=None):
    pkgs_list = []

    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False)

        if not recipes:
            recipes = tinfoil.all_recipe_files(variants=False)

        for fn in recipes:
            try:
                if fn.startswith("/"):
                    data = tinfoil.parse_recipe_file(fn)
                else:
                    data = tinfoil.parse_recipe(fn)
            except bb.providers.NoProvider:
                bb.note(" No provider for %s" % fn)
                continue

            # Look up PN before it is first used in the skip message below
            pn = data.getVar('PN')

            unreliable = data.getVar('UPSTREAM_CHECK_UNRELIABLE')
            if unreliable == "1":
                bb.note(" Skip package %s as upstream check unreliable" % pn)
                continue

            uv = get_recipe_upstream_version(data)

            cur_ver = uv['current_version']

            upstream_version_unknown = data.getVar('UPSTREAM_VERSION_UNKNOWN')
            if not uv['version']:
                status = "UNKNOWN" if upstream_version_unknown else "UNKNOWN_BROKEN"
            else:
                cmp = vercmp_string(uv['current_version'], uv['version'])
                if cmp == -1:
                    status = "UPDATE" if not upstream_version_unknown else "KNOWN_BROKEN"
                elif cmp == 0:
                    status = "MATCH" if not upstream_version_unknown else "KNOWN_BROKEN"
                else:
                    status = "UNKNOWN" if upstream_version_unknown else "UNKNOWN_BROKEN"

            next_ver = uv['version'] if uv['version'] else "N/A"
            revision = uv['revision'] if uv['revision'] else "N/A"
            maintainer = data.getVar('RECIPE_MAINTAINER')
            no_upgrade_reason = data.getVar('RECIPE_NO_UPDATE_REASON')

            pkgs_list.append((pn, status, cur_ver, next_ver, maintainer, revision, no_upgrade_reason))

    return pkgs_list

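# Illustrative consumer of get_recipe_upgrade_status() above; the columns simply
# mirror the tuples appended to pkgs_list, nothing beyond that is assumed.
for pn, status, cur_ver, next_ver, maintainer, revision, reason in get_recipe_upgrade_status():
    print('%-30s %-15s %-20s %-20s %s' % (pn, status, cur_ver, next_ver, maintainer or ''))
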
def setup_tinfoil(bitbakepath, enable_tracking):
    sys.path.insert(0, bitbakepath + '/lib')
    import bb.tinfoil
    import bb.cooker
    import bb.data
    try:
        tinfoil = bb.tinfoil.Tinfoil(tracking=enable_tracking)
    except TypeError:
        # old API
        tinfoil = bb.tinfoil.Tinfoil()
        if enable_tracking:
            tinfoil.cooker.enableDataTracking()
    tinfoil.prepare(config_only=True)
    return tinfoil

def _setup_tinfoil(bitbakepath, enable_tracking):
    sys.path.insert(0, bitbakepath + "/lib")
    import bb.tinfoil
    import bb.cooker
    import bb.data
    try:
        tinfoil = bb.tinfoil.Tinfoil(tracking=enable_tracking)
    except TypeError:
        # old API
        tinfoil = bb.tinfoil.Tinfoil()
        if enable_tracking:
            tinfoil.cooker.enableDataTracking()
    tinfoil.prepare(config_only=True)
    return tinfoil

def test_wait_event(self):
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=True)

        tinfoil.set_event_mask(['bb.event.FilesMatchingFound',
                                'bb.command.CommandCompleted',
                                'bb.command.CommandFailed',
                                'bb.command.CommandExit'])

        # Need to drain events otherwise events that were masked may still be in the queue
        while tinfoil.wait_event():
            pass

        pattern = 'conf'
        res = tinfoil.run_command('testCookerCommandEvent', pattern, handle_events=False)
        self.assertTrue(res)

        eventreceived = False
        commandcomplete = False
        start = time.time()
        # Wait for maximum 60s in total so we'd detect spurious heartbeat events for example
        while (not (eventreceived == True and commandcomplete == True)
               and (time.time() - start < 60)):
            # if we received both events (on let's say a good day), we are done
            event = tinfoil.wait_event(1)
            if event:
                if isinstance(event, bb.command.CommandCompleted):
                    commandcomplete = True
                elif isinstance(event, bb.event.FilesMatchingFound):
                    self.assertEqual(pattern, event._pattern)
                    self.assertIn('A', event._matches)
                    self.assertIn('B', event._matches)
                    eventreceived = True
                elif isinstance(event, logging.LogRecord):
                    continue
                else:
                    self.fail('Unexpected event: %s' % event)

        self.assertTrue(commandcomplete, 'Timed out waiting for CommandCompleted event from bitbake server (Matching event received: %s)' % str(eventreceived))
        self.assertTrue(eventreceived, 'Did not receive FilesMatchingFound event from bitbake server')

def setup_tinfoil(config_only=False, basepath=None, tracking=False):
    """Initialize tinfoil api from bitbake"""
    import scriptpath
    orig_cwd = os.path.abspath(os.curdir)
    if basepath:
        os.chdir(basepath)
    bitbakepath = scriptpath.add_bitbake_lib_path()
    if not bitbakepath:
        logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
        sys.exit(1)

    import bb.tinfoil
    tinfoil = bb.tinfoil.Tinfoil(tracking=tracking)
    tinfoil.prepare(config_only)
    tinfoil.logger.setLevel(logger.getEffectiveLevel())
    os.chdir(orig_cwd)
    return tinfoil

def test_parse_recipe_copy_expand(self):
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False, quiet=2)
        testrecipe = 'mdadm'
        best = tinfoil.find_best_provider(testrecipe)
        if not best:
            self.fail('Unable to find recipe providing %s' % testrecipe)
        rd = tinfoil.parse_recipe_file(best[3])
        # Check we can get variable values
        self.assertEqual(testrecipe, rd.getVar('PN'))
        # Check that expanding a value that includes a variable reference works
        self.assertEqual(testrecipe, rd.getVar('BPN'))
        # Now check that changing the referenced variable's value in a copy gives that
        # value when expanding
        localdata = bb.data.createCopy(rd)
        localdata.setVar('PN', 'hello')
        self.assertEqual('hello', localdata.getVar('BPN'))

def test_list_recipes(self):
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False, quiet=2)
        # Check pkg_pn
        checkpns = ['tar', 'automake', 'coreutils', 'm4-native', 'nativesdk-gcc']
        pkg_pn = tinfoil.cooker.recipecaches[''].pkg_pn
        for pn in checkpns:
            self.assertIn(pn, pkg_pn)
        # Check pkg_fn
        checkfns = {'nativesdk-gcc': '^virtual:nativesdk:.*',
                    'coreutils': '.*/coreutils_.*.bb'}
        for fn, pn in tinfoil.cooker.recipecaches[''].pkg_fn.items():
            if pn in checkpns:
                if pn in checkfns:
                    self.assertTrue(re.match(checkfns[pn], fn), 'Entry for %s: %s did not match %s' % (pn, fn, checkfns[pn]))
                checkpns.remove(pn)
        if checkpns:
            self.fail('Unable to find pkg_fn entries for: %s' % ', '.join(checkpns))

def runqemu(pn, test):
    import bb.tinfoil
    import bb.build

    tinfoil = bb.tinfoil.Tinfoil()
    tinfoil.prepare(False)
    try:
        tinfoil.logger.setLevel(logging.WARNING)
        import oeqa.targetcontrol
        tinfoil.config_data.setVar("TEST_LOG_DIR", "${WORKDIR}/testimage")
        tinfoil.config_data.setVar("TEST_QEMUBOOT_TIMEOUT", "90")
        import oe.recipeutils
        recipefile = oe.recipeutils.pn_to_recipe(tinfoil.cooker, pn)
        recipedata = oe.recipeutils.parse_recipe(recipefile, [], tinfoil.config_data)

        # The QemuRunner log is saved out, but we need to ensure it is at the right
        # log level (and then ensure that since it's a child of the BitBake logger,
        # we disable propagation so we don't then see the log events on the console)
        logger = logging.getLogger('BitBake.QemuRunner')
        logger.setLevel(logging.DEBUG)
        logger.propagate = False
        logdir = recipedata.getVar("TEST_LOG_DIR", True)

        qemu = oeqa.targetcontrol.QemuTarget(recipedata)
    finally:
        # We need to shut down tinfoil early here in case we actually want
        # to run tinfoil-using utilities with the running QEMU instance.
        # Luckily QemuTarget doesn't need it after the constructor.
        tinfoil.shutdown()

    try:
        qemu.deploy()
        try:
            qemu.start()
        except bb.build.FuncFailed:
            raise Exception('Failed to start QEMU - see the logs in %s' % logdir)

        yield qemu
    finally:
        try:
            qemu.stop()
        except:
            pass

def test_dependencies(self):
    """
    Verify that the correct dependencies are generated for specific SRC_URI entries.
    """
    with bb.tinfoil.Tinfoil() as tinfoil, tempfile.TemporaryDirectory(prefix="selftest-fetch") as tempdir:
        tinfoil.prepare(config_only=False, quiet=2)

        r = """
        LICENSE="CLOSED"
        SRC_URI="http://example.com/tarball.zip"
        """
        f = self.write_recipe(textwrap.dedent(r), tempdir)
        d = tinfoil.parse_recipe_file(f)
        self.assertIn("wget-native", d.getVarFlag("do_fetch", "depends"))
        self.assertIn("unzip-native", d.getVarFlag("do_unpack", "depends"))

        # Verify that the downloadfilename overrides the URI
        r = """
        LICENSE="CLOSED"
        SRC_URI="https://example.com/tarball;downloadfilename=something.zip"
        """
        f = self.write_recipe(textwrap.dedent(r), tempdir)
        d = tinfoil.parse_recipe_file(f)
        self.assertIn("wget-native", d.getVarFlag("do_fetch", "depends"))
        self.assertIn("unzip-native", d.getVarFlag("do_unpack", "depends") or "")

        r = """
        LICENSE="CLOSED"
        SRC_URI="ftp://example.com/tarball.lz"
        """
        f = self.write_recipe(textwrap.dedent(r), tempdir)
        d = tinfoil.parse_recipe_file(f)
        self.assertIn("wget-native", d.getVarFlag("do_fetch", "depends"))
        self.assertIn("lzip-native", d.getVarFlag("do_unpack", "depends"))

        r = """
        LICENSE="CLOSED"
        SRC_URI="git://example.com/repo"
        """
        f = self.write_recipe(textwrap.dedent(r), tempdir)
        d = tinfoil.parse_recipe_file(f)
        self.assertIn("git-native", d.getVarFlag("do_fetch", "depends"))

def setup_tinfoil(bitbakepath, enable_tracking, loglevel=None):
    sys.path.insert(0, bitbakepath + '/lib')
    import bb.tinfoil
    import bb.cooker
    import bb.data
    try:
        tinfoil = bb.tinfoil.Tinfoil(tracking=enable_tracking)
    except TypeError:
        # old API
        tinfoil = bb.tinfoil.Tinfoil()
        if enable_tracking:
            tinfoil.cooker.enableDataTracking()
    tinfoil.logger.setLevel(logging.WARNING)
    if loglevel:
        tinfoil.logger.setLevel(loglevel)
    tinfoil.prepare(config_only=True)
    return tinfoil

def setup_tinfoil(self, tracking=False):
    tinfoil = self.bb.tinfoil.Tinfoil(tracking=tracking)
    options = self.bb.tinfoil.TinfoilConfigParameters(False, parse_only=True, dry_run=True)
    tinfoil.prepare(config_params=options, extra_features=[self.bb.cooker.CookerFeatures.HOB_EXTRA_CACHES])
    # this part is from bitbake/lib/bblayers:
    tinfoil.bblayers = (tinfoil.config_data.getVar('BBLAYERS', True) or "").split()
    layerconfs = tinfoil.config_data.varhistory.get_variable_items_files('BBFILE_COLLECTIONS', tinfoil.config_data)
    tinfoil.config_data.bbfile_collections = {layer: os.path.dirname(os.path.dirname(path))
                                              for layer, path in layerconfs.items()}
    return tinfoil

def extract_bitbake_metadata(recipes):
    # tinfoil sets up log output for the bitbake loggers, but bb uses
    # a separate namespace at this time
    setup_log_handler(logging.getLogger('bb'))

    tinfoil = Tinfoil(output=sys.stderr)
    tinfoil.prepare(config_only=True)
    tinfoil.parseRecipes()

    data = {}
    metadata = tinfoil.parse_metadata()
    machine = metadata.getVar('MACHINE', True)
    data['image-bootloader'] = metadata.getVar('IMAGE_BOOTLOADER', True)
    data['soc-family'] = metadata.getVar('SOC_FAMILY', True)
    if data['soc-family'] is None:
        data['soc-family'] = metadata.getVar('MACHINEOVERRIDES', True)
    data['recipes'] = {}

    metadata = None
    for recipe in recipes:
        try:
            metadata = tinfoil.parse_metadata(recipe)
        except:
            continue
        pv = metadata.getVar('PV', True)
        localversion = metadata.getVar('LOCALVERSION', True)
        version = pv + (localversion or '')
        data['recipes'][recipe] = {}
        data['recipes'][recipe]['recipe'] = metadata.getVar('PN', True)
        data['recipes'][recipe]['version'] = version
        data['recipes'][recipe]['file'] = tinfoil.build_target_to_fn(recipe)
        data['recipes'][recipe]['srcbranch'] = metadata.getVar('SRCBRANCH', True)
        data['recipes'][recipe]['compatible-machine'] = metadata.getVar('COMPATIBLE_MACHINE', True)
        description = metadata.getVar('DESCRIPTION', True)
        if not description:
            description = metadata.getVar('SUMMARY', True)
        data['recipes'][recipe]['description'] = description

    return {machine: data}

def extract_bitbake_metadata(recipes):
    # tinfoil sets up log output for the bitbake loggers, but bb uses
    # a separate namespace at this time
    setup_log_handler(logging.getLogger('bb'))

    tinfoil = Tinfoil(output=sys.stderr)
    tinfoil.prepare(config_only=True)
    tinfoil.parseRecipes()

    data = {}
    metadata = tinfoil.parse_metadata()
    machine = metadata.getVar('MACHINE')
    data['image-bootloader'] = metadata.getVar('IMAGE_BOOTLOADER')
    data['soc-family'] = metadata.getVar('SOC_FAMILY')
    data['recipes'] = {}

    metadata = None
    for recipe in recipes:
        try:
            metadata = tinfoil.parse_metadata(recipe)
        except:
            continue
        pv = metadata.getVar('PV', True)
        localversion = metadata.getVar('LOCALVERSION', True)
        version = pv + (localversion or '')
        data['recipes'][recipe] = {}
        data['recipes'][recipe]['recipe'] = metadata.getVar('PN', True)
        data['recipes'][recipe]['version'] = version
        data['recipes'][recipe]['file'] = tinfoil.build_target_to_fn(recipe)
        data['recipes'][recipe]['srcbranch'] = metadata.getVar('SRCBRANCH', True)
        data['recipes'][recipe]['compatible-machine'] = metadata.getVar('COMPATIBLE_MACHINE', True)
        description = metadata.getVar('DESCRIPTION', True)
        if not description:
            description = metadata.getVar('SUMMARY', True)
        data['recipes'][recipe]['description'] = description

    return {machine: data}

def setup_tinfoil(config_only=False, basepath=None, tracking=False):
    """Initialize tinfoil api from bitbake"""
    import scriptpath
    orig_cwd = os.path.abspath(os.curdir)
    if basepath:
        os.chdir(basepath)
    bitbakepath = scriptpath.add_bitbake_lib_path()
    if not bitbakepath:
        logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
        sys.exit(1)

    import bb.tinfoil
    tinfoil = bb.tinfoil.Tinfoil(tracking=tracking)
    tinfoil.prepare(config_only)
    tinfoil.logger.setLevel(logger.getEffectiveLevel())
    os.chdir(orig_cwd)
    return tinfoil

def test_wait_event(self):
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=True)

        tinfoil.set_event_mask(['bb.event.FilesMatchingFound', 'bb.command.CommandCompleted'])

        # Need to drain events otherwise events that were masked may still be in the queue
        while tinfoil.wait_event():
            pass

        pattern = 'conf'
        res = tinfoil.run_command('findFilesMatchingInDir', pattern, 'conf/machine')
        self.assertTrue(res)

        eventreceived = False
        commandcomplete = False
        start = time.time()
        # Wait for 10s in total so we'd detect spurious heartbeat events for example
        # The test is IO load sensitive too
        while time.time() - start < 10:
            event = tinfoil.wait_event(1)
            if event:
                if isinstance(event, bb.command.CommandCompleted):
                    commandcomplete = True
                elif isinstance(event, bb.event.FilesMatchingFound):
                    self.assertEqual(pattern, event._pattern)
                    self.assertIn('qemuarm.conf', event._matches)
                    eventreceived = True
                elif isinstance(event, logging.LogRecord):
                    continue
                else:
                    self.fail('Unexpected event: %s' % event)

        self.assertTrue(commandcomplete, 'Timed out waiting for CommandCompleted event from bitbake server')
        self.assertTrue(eventreceived, 'Did not receive FilesMatchingFound event from bitbake server')

def test_setvariable_clean(self):
    # First check that setVariable affects the datastore
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=True)
        tinfoil.run_command('setVariable', 'TESTVAR', 'specialvalue')
        self.assertEqual(tinfoil.config_data.getVar('TESTVAR'), 'specialvalue', 'Value set using setVariable is not reflected in client-side getVar()')

    # Now check that the setVariable's effects are no longer present
    # (this may legitimately break in future if we stop reinitialising
    # the datastore, in which case we'll have to reconsider use of
    # setVariable entirely)
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=True)
        self.assertNotEqual(tinfoil.config_data.getVar('TESTVAR'), 'specialvalue', 'Value set using setVariable is still present!')

    # Now check that setVar on the main datastore works (uses setVariable internally)
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=True)
        tinfoil.config_data.setVar('TESTVAR', 'specialvalue')
        value = tinfoil.run_command('getVariable', 'TESTVAR')
        self.assertEqual(value, 'specialvalue', 'Value set using config_data.setVar() is not reflected in config_data.getVar()')

def test_setvariable_clean(self):
    # First check that setVariable affects the datastore
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=True)
        tinfoil.run_command('setVariable', 'TESTVAR', 'specialvalue')
        self.assertEqual(tinfoil.config_data.getVar('TESTVAR'), 'specialvalue', 'Value set using setVariable is not reflected in client-side getVar()')

    # Now check that the setVariable's effects are no longer present
    # (this may legitimately break in future if we stop reinitialising
    # the datastore, in which case we'll have to reconsider use of
    # setVariable entirely)
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=True)
        self.assertNotEqual(tinfoil.config_data.getVar('TESTVAR'), 'specialvalue', 'Value set using setVariable is still present!')

    # Now check that setVar on the main datastore works (uses setVariable internally)
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=True)
        tinfoil.config_data.setVar('TESTVAR', 'specialvalue')
        value = tinfoil.run_command('getVariable', 'TESTVAR')
        self.assertEqual(value, 'specialvalue', 'Value set using config_data.setVar() is not reflected in config_data.getVar()')

def compare_siglists(a_blob, b_blob, taskdiff=False):
    # FIXME collapse down a recipe's tasks?
    alines = a_blob.data_stream.read().decode('utf-8').splitlines()
    blines = b_blob.data_stream.read().decode('utf-8').splitlines()
    keys = []
    pnmap = {}
    def readsigs(lines):
        sigs = {}
        for line in lines:
            linesplit = line.split()
            if len(linesplit) > 2:
                sigs[linesplit[0]] = linesplit[2]
                if not linesplit[0] in keys:
                    keys.append(linesplit[0])
                pnmap[linesplit[1]] = linesplit[0].rsplit('.', 1)[0]
        return sigs
    adict = readsigs(alines)
    bdict = readsigs(blines)
    out = []

    changecount = 0
    addcount = 0
    removecount = 0
    if taskdiff:
        with bb.tinfoil.Tinfoil() as tinfoil:
            tinfoil.prepare(config_only=True)

            changes = collections.OrderedDict()

            def compare_hashfiles(pn, taskname, hash1, hash2):
                hashes = [hash1, hash2]
                hashfiles = bb.siggen.find_siginfo(pn, taskname, hashes, tinfoil.config_data)

                if not taskname:
                    (pn, taskname) = pn.rsplit('.', 1)
                    pn = pnmap.get(pn, pn)
                desc = '%s.%s' % (pn, taskname)

                if len(hashfiles) == 0:
                    out.append("Unable to find matching sigdata for %s with hashes %s or %s" % (desc, hash1, hash2))
                elif not hash1 in hashfiles:
                    out.append("Unable to find matching sigdata for %s with hash %s" % (desc, hash1))
                elif not hash2 in hashfiles:
                    out.append("Unable to find matching sigdata for %s with hash %s" % (desc, hash2))
                else:
                    out2 = bb.siggen.compare_sigfiles(hashfiles[hash1], hashfiles[hash2], recursecb, collapsed=True)
                    for line in out2:
                        m = hashlib.sha256()
                        m.update(line.encode('utf-8'))
                        entry = changes.get(m.hexdigest(), (line, []))
                        if desc not in entry[1]:
                            changes[m.hexdigest()] = (line, entry[1] + [desc])

            # Define recursion callback
            def recursecb(key, hash1, hash2):
                compare_hashfiles(key, None, hash1, hash2)
                return []

            for key in keys:
                siga = adict.get(key, None)
                sigb = bdict.get(key, None)
                if siga is not None and sigb is not None and siga != sigb:
                    changecount += 1
                    (pn, taskname) = key.rsplit('.', 1)
                    compare_hashfiles(pn, taskname, siga, sigb)
                elif siga is None:
                    addcount += 1
                elif sigb is None:
                    removecount += 1

            for key, item in changes.items():
                line, tasks = item
                if len(tasks) == 1:
                    desc = tasks[0]
                elif len(tasks) == 2:
                    desc = '%s and %s' % (tasks[0], tasks[1])
                else:
                    desc = '%s and %d others' % (tasks[-1], len(tasks)-1)
                out.append('%s: %s' % (desc, line))
    else:
        for key in keys:
            siga = adict.get(key, None)
            sigb = bdict.get(key, None)
            if siga is not None and sigb is not None and siga != sigb:
                out.append('%s changed from %s to %s' % (key, siga, sigb))
                changecount += 1
            elif siga is None:
                out.append('%s was added' % key)
                addcount += 1
            elif sigb is None:
                out.append('%s was removed' % key)
                removecount += 1

    out.append('Summary: %d tasks added, %d tasks removed, %d tasks modified (%.1f%%)' % (addcount, removecount, changecount, (changecount / float(len(bdict)) * 100)))
    return '\n'.join(out)

def select_recipes(args):
    import bb.tinfoil
    tinfoil = bb.tinfoil.Tinfoil()
    tinfoil.prepare(False)

    pkg_pn = tinfoil.cooker.recipecache.pkg_pn
    (latest_versions, preferred_versions) = bb.providers.findProviders(tinfoil.config_data, tinfoil.cooker.recipecache, pkg_pn)

    skip_classes = args.skip_classes.split(',')

    recipelist = []
    for pn in sorted(pkg_pn):
        pref = preferred_versions[pn]
        inherits = [os.path.splitext(os.path.basename(f))[0] for f in tinfoil.cooker.recipecache.inherits[pref[1]]]
        for cls in skip_classes:
            if cls in inherits:
                break
        else:
            recipelist.append(pn)

    tinfoil.shutdown()

    resume_from = args.resume_from
    if resume_from:
        if not resume_from in recipelist:
            print('%s is not a testable recipe' % resume_from)
            return 1

    if args.only:
        only = args.only.split(',')
        for onlyitem in only:
            for pn in recipelist:
                if fnmatch.fnmatch(pn, onlyitem):
                    break
            else:
                print('%s does not match any testable recipe' % onlyitem)
                return 1
    else:
        only = None

    if args.skip:
        skip = args.skip.split(',')
    else:
        skip = []

    recipes = []
    for pn in recipelist:
        if resume_from:
            if pn == resume_from:
                resume_from = None
            else:
                continue

        if args.only:
            for item in only:
                if fnmatch.fnmatch(pn, item):
                    break
            else:
                continue

        skipit = False
        for item in skip:
            if fnmatch.fnmatch(pn, item):
                skipit = True
        if skipit:
            continue

        recipes.append(pn)

    return recipes

def main():
    # Get access to our Django model
    newpath = os.path.abspath(os.path.dirname(os.path.abspath(sys.argv[0])) + '/..')
    sys.path.append(newpath)
    os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
    from django.core.management import setup_environ
    from django.conf import settings
    from layerindex.models import LayerItem, Recipe
    from django.db import transaction
    import settings

    setup_environ(settings)

    # Set path to bitbake lib dir
    basepath = os.path.abspath(sys.argv[1])
    bitbakedir_env = os.environ.get('BITBAKEDIR', '')
    if bitbakedir_env and os.path.exists(bitbakedir_env + '/lib/bb'):
        bitbakepath = bitbakedir_env
    elif os.path.exists(basepath + '/bitbake/lib/bb'):
        bitbakepath = basepath + '/bitbake'
    elif os.path.exists(basepath + '/../bitbake/lib/bb'):
        bitbakepath = os.path.abspath(basepath + '/../bitbake')
    else:
        # look for bitbake/bin dir in PATH
        bitbakepath = None
        for pth in os.environ['PATH'].split(':'):
            if os.path.exists(os.path.join(pth, '../lib/bb')):
                bitbakepath = os.path.abspath(os.path.join(pth, '..'))
                break
        if not bitbakepath:
            print("Unable to find bitbake by searching BITBAKEDIR, specified path '%s' or its parent, or PATH" % basepath)
            sys.exit(1)

    # Skip sanity checks
    os.environ['BB_ENV_EXTRAWHITE'] = 'DISABLE_SANITY_CHECKS'
    os.environ['DISABLE_SANITY_CHECKS'] = '1'

    sys.path.extend([bitbakepath + '/lib'])
    import bb.tinfoil
    tinfoil = bb.tinfoil.Tinfoil()
    tinfoil.prepare(config_only=True)

    logger = logging.getLogger('BitBake')

    fetchdir = settings.LAYER_FETCH_DIR
    if not fetchdir:
        logger.error("Please set LAYER_FETCH_DIR in settings.py")
        sys.exit(1)

    for layer in LayerItem.objects.filter(status='P'):
        urldir = sanitise_path(layer.vcs_url)
        repodir = os.path.join(fetchdir, urldir)
        layerrecipes = Recipe.objects.filter(layer=layer)
        for recipe in layerrecipes:
            fullpath = str(os.path.join(repodir, layer.vcs_subdir, recipe.filepath, recipe.filename))
            print(fullpath)
            try:
                envdata = bb.cache.Cache.loadDataFull(fullpath, [], tinfoil.config_data)
                print("DESCRIPTION = \"%s\"" % envdata.getVar("DESCRIPTION", True))
            except Exception as e:
                logger.info("Unable to read %s: %s", fullpath, str(e))

    sys.exit(0)

def main():
    docfiles = []
    metadirs = []
    bbvars = set()
    undocumented = []
    docconf = ""
    onlydoctags = False

    # Collect and validate input
    try:
        opts, args = getopt.getopt(sys.argv[1:], "d:hm:t:T", ["help"])
    except getopt.GetoptError as err:
        print('%s' % str(err))
        usage()
        sys.exit(2)

    for o, a in opts:
        if o in ('-h', '--help'):
            usage()
            sys.exit(0)
        elif o == '-d':
            if os.path.isfile(a):
                docfiles.append(a)
            else:
                print('ERROR: documentation file %s is not a regular file' % a)
                sys.exit(3)
        elif o == '-m':
            if os.path.isdir(a):
                metadirs.append(a)
            else:
                print('ERROR: meta directory %s is not a directory' % a)
                sys.exit(4)
        elif o == "-t":
            if os.path.isfile(a):
                docconf = a
        elif o == "-T":
            onlydoctags = True
        else:
            assert False, "unhandled option"

    if len(docfiles) == 0:
        print('ERROR: no docfile specified')
        usage()
        sys.exit(5)

    if len(metadirs) == 0:
        print('ERROR: no metadir specified')
        usage()
        sys.exit(6)

    if onlydoctags and docconf == "":
        print('ERROR: no docconf specified')
        usage()
        sys.exit(7)

    prog = re.compile("^[^a-z]*$")
    with bb.tinfoil.Tinfoil() as tinfoil:
        tinfoil.prepare(config_only=False)
        parser = bb.codeparser.PythonParser('parser', None)
        datastore = tinfoil.config_data

        def bbvars_update(data):
            if prog.match(data):
                bbvars.add(data)
            if tinfoil.config_data.getVarFlag(data, 'python'):
                try:
                    parser.parse_python(tinfoil.config_data.getVar(data))
                except bb.data_smart.ExpansionError:
                    pass
                for var in parser.references:
                    if prog.match(var):
                        bbvars.add(var)
            else:
                try:
                    expandedVar = datastore.expandWithRefs(datastore.getVar(data, False), data)
                    for var in expandedVar.references:
                        if prog.match(var):
                            bbvars.add(var)
                except bb.data_smart.ExpansionError:
                    pass

        # Use tinfoil to collect all the variable names globally
        for data in datastore:
            bbvars_update(data)

        # Collect variables from all recipes
        for recipe in tinfoil.all_recipe_files():
            print("Checking %s" % recipe)
            for data in tinfoil.parse_recipe_file(recipe):
                bbvars_update(data)

    documented_vars = collect_documented_vars(docfiles)

    # Check each var for documentation
    varlen = 0
    for v in bbvars:
        if len(v) > varlen:
            varlen = len(v)
        if not bbvar_is_documented(v, documented_vars):
            undocumented.append(v)
    undocumented.sort()
    varlen = varlen + 1

    # Report all undocumented variables
    print('Found %d undocumented bb variables (out of %d):' % (len(undocumented), len(bbvars)))
    header = '%s%s' % (str("VARIABLE").ljust(varlen), str("DOCTAG").ljust(7))
    print(header)
    print(str("").ljust(len(header), '='))
    for v in undocumented:
        doctag = bbvar_doctag(v, docconf)
        if not onlydoctags or not doctag == "":
            print('%s%s' % (v.ljust(varlen), doctag))

def runqemu(pn, ssh=True, runqemuparams='', image_fstype=None, launch_cmd=None, qemuparams=None, overrides={}, discard_writes=True):
    """
    launch_cmd means directly run the command, don't need set rootfs or env vars.
    """
    import bb.tinfoil
    import bb.build

    tinfoil = bb.tinfoil.Tinfoil()
    tinfoil.prepare(config_only=False, quiet=True)
    try:
        tinfoil.logger.setLevel(logging.WARNING)
        import oeqa.targetcontrol
        tinfoil.config_data.setVar("TEST_LOG_DIR", "${WORKDIR}/testimage")
        tinfoil.config_data.setVar("TEST_QEMUBOOT_TIMEOUT", "1000")
        # Tell QemuTarget() whether need find rootfs/kernel or not
        if launch_cmd:
            tinfoil.config_data.setVar("FIND_ROOTFS", '0')
        else:
            tinfoil.config_data.setVar("FIND_ROOTFS", '1')
        recipedata = tinfoil.parse_recipe(pn)

        for key, value in overrides.items():
            recipedata.setVar(key, value)

        # The QemuRunner log is saved out, but we need to ensure it is at the right
        # log level (and then ensure that since it's a child of the BitBake logger,
        # we disable propagation so we don't then see the log events on the console)
        logger = logging.getLogger('BitBake.QemuRunner')
        logger.setLevel(logging.DEBUG)
        logger.propagate = False
        logdir = recipedata.getVar("TEST_LOG_DIR")

        qemu = oeqa.targetcontrol.QemuTarget(recipedata, image_fstype)
    finally:
        # We need to shut down tinfoil early here in case we actually want
        # to run tinfoil-using utilities with the running QEMU instance.
        # Luckily QemuTarget doesn't need it after the constructor.
        tinfoil.shutdown()

    # Setup bitbake logger as console handler is removed by tinfoil.shutdown
    bblogger = logging.getLogger('BitBake')
    bblogger.setLevel(logging.INFO)
    console = logging.StreamHandler(sys.stdout)
    bbformat = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
    if sys.stdout.isatty():
        bbformat.enable_color()
    console.setFormatter(bbformat)
    bblogger.addHandler(console)

    try:
        qemu.deploy()
        try:
            qemu.start(params=qemuparams, ssh=ssh, runqemuparams=runqemuparams, launch_cmd=launch_cmd, discard_writes=discard_writes)
        except bb.build.FuncFailed:
            raise Exception('Failed to start QEMU - see the logs in %s' % logdir)

        yield qemu
    finally:
        try:
            qemu.stop()
        except:
            pass

def setUpModule():
    global tinfoil
    global metaselftestpath
    metaselftestpath = get_test_layer()
    tinfoil = bb.tinfoil.Tinfoil(tracking=True)
    tinfoil.prepare(config_only=False, quiet=2)