def runqemu(args, config, basepath, workspace):
    """Entry point for the devtool 'runqemu' subcommand"""
    tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
    try:
        # Read everything we need before releasing tinfoil; the finally
        # ensures the bitbake server is shut down even if a getVar fails
        # (previously shutdown() was skipped on exception, leaking the server)
        machine = tinfoil.config_data.getVar('MACHINE', True)
        bindir_native = tinfoil.config_data.getVar('STAGING_BINDIR_NATIVE', True)
    finally:
        tinfoil.shutdown()

    # Sanity-check that a qemu binary is actually present in the SDK
    if not glob.glob(os.path.join(bindir_native, 'qemu-system-*')):
        raise DevtoolError('QEMU is not available within this SDK')

    # Fall back to the first configured SDK target if no image was specified
    imagename = args.imagename
    if not imagename:
        sdk_targets = config.get('SDK', 'sdk_targets', '').split()
        if sdk_targets:
            imagename = sdk_targets[0]
    if not imagename:
        raise DevtoolError('Unable to determine image name to run, please specify one')

    try:
        exec_build_env_command(config.init_path, basepath,
                               'runqemu %s %s %s' % (machine, imagename, " ".join(args.args)),
                               watch=True)
    except bb.process.ExecutionError as e:
        # We've already seen the output since watch=True, so just ensure we return something to the user
        return e.exitcode
    return 0
def package(args, config, basepath, workspace):
    """Entry point for the devtool 'package' subcommand"""
    check_workspace_recipe(workspace, args.recipename)

    tinfoil = setup_tinfoil(basepath=basepath)
    try:
        tinfoil.prepare(config_only=True)
        # Prefer an explicitly configured package type; otherwise fall back
        # to IMAGE_PKGTYPE from the build configuration
        pkgtype = config.get('Package', 'image_pkgtype', '')
        if not pkgtype:
            pkgtype = tinfoil.config_data.getVar('IMAGE_PKGTYPE', True)
        deploy_dir_pkg = tinfoil.config_data.getVar('DEPLOY_DIR_%s' % pkgtype.upper(), True)
    finally:
        tinfoil.shutdown()

    pkgtask = config.get('Package', 'package_task', 'package_write_%s' % pkgtype)
    try:
        exec_build_env_command(config.init_path, basepath,
                               'bitbake -c %s %s' % (pkgtask, args.recipename),
                               watch=True)
    except bb.process.ExecutionError as e:
        # We've already seen the output since watch=True, so just ensure we return something to the user
        return e.exitcode

    logger.info('Your packages are in %s' % deploy_dir_pkg)
    return 0
def configure(args, config, basepath, workspace):
    """Entry point for the devtool 'configure' subcommand"""
    # The component must already have been set up with 'devtool modify'
    if args.component not in workspace:
        raise DevtoolError(
            "recipe %s is not in your workspace, run devtool modify command first" % args.component)

    tinfoil = setup_tinfoil(basepath=basepath)
    try:
        recipedata = parse_recipe(config, tinfoil, args.component,
                                  appends=True, filter_workspace=False)
        if not recipedata:
            return 1
        pn = recipedata.getVar('PN', True)
        # Double-check using the parsed PN, which may differ from the
        # name given on the command line
        if pn not in workspace:
            raise DevtoolError(
                "Run devtool modify before calling configure for %s" % pn)
    finally:
        tinfoil.shutdown()

    exec_build_env_command(config.init_path, basepath,
                           'bitbake -c configure %s' % pn, watch=True)
    return 0
def sdk_install(args, config, basepath, workspace):
    """Entry point for the devtool sdk-install command"""
    import oe.recipeutils
    import bb.process

    for recipe in args.recipename:
        if recipe in workspace:
            raise DevtoolError('recipe %s is a recipe in your workspace' % recipe)

    tasks = ['do_populate_sysroot', 'do_packagedata']
    stampprefixes = {}
    def checkstamp(recipe):
        # A recipe counts as installed if a non-sigdata stamp (normal or
        # setscene) exists for its do_populate_sysroot task
        stampprefix = stampprefixes[recipe]
        stamps = glob.glob(stampprefix + '*')
        for stamp in stamps:
            if '.sigdata.' not in stamp and stamp.startswith((stampprefix + '.', stampprefix + '_setscene.')):
                return True
        return False

    install_recipes = []
    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
        for recipe in args.recipename:
            rd = parse_recipe(config, tinfoil, recipe, True)
            if not rd:
                return 1
            stampprefixes[recipe] = '%s.%s' % (rd.getVar('STAMP', True), tasks[0])
            if checkstamp(recipe):
                logger.info('%s is already installed' % recipe)
            else:
                install_recipes.append(recipe)
    finally:
        tinfoil.shutdown()

    if install_recipes:
        logger.info('Installing %s...' % ', '.join(install_recipes))
        install_tasks = []
        for recipe in install_recipes:
            for task in tasks:
                # Native recipes have no packaging step
                if recipe.endswith('-native') and 'package' in task:
                    continue
                install_tasks.append('%s:%s' % (recipe, task))
        options = ''
        if not args.allow_build:
            options += ' --setscene-only'
        try:
            exec_build_env_command(config.init_path, basepath,
                                   'bitbake %s %s' % (options, ' '.join(install_tasks)),
                                   watch=True)
        except bb.process.ExecutionError as e:
            raise DevtoolError('Failed to install %s:\n%s' % (recipe, str(e)))
        # Fix: previously a failure raised immediately, making the 'failed'
        # flag and the 'return 2' path unreachable and skipping the check of
        # any remaining recipes - now all recipes are verified and reported
        failed = False
        for recipe in install_recipes:
            if checkstamp(recipe):
                logger.info('Successfully installed %s' % recipe)
            else:
                logger.error('Failed to install %s - unavailable' % recipe)
                failed = True
        if failed:
            return 2
    return 0
def runqemu(args, config, basepath, workspace):
    """Entry point for the devtool 'runqemu' subcommand"""
    tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
    try:
        machine = tinfoil.config_data.getVar('MACHINE')
        # Assemble the native bindir from its component variables
        bindir_native = os.path.join(tinfoil.config_data.getVar('STAGING_DIR'),
                                     tinfoil.config_data.getVar('BUILD_ARCH'),
                                     tinfoil.config_data.getVar('bindir_native').lstrip(os.path.sep))
    finally:
        tinfoil.shutdown()

    if not glob.glob(os.path.join(bindir_native, 'qemu-system-*')):
        raise DevtoolError('QEMU is not available within this SDK')

    # Use the explicit image if given, otherwise the first configured SDK target
    imagename = args.imagename
    if not imagename:
        targets = config.get('SDK', 'sdk_targets', '').split()
        if targets:
            imagename = targets[0]
    if not imagename:
        raise DevtoolError('Unable to determine image name to run, please specify one')

    try:
        # FIXME runqemu assumes that if OECORE_NATIVE_SYSROOT is set then it shouldn't
        # run bitbake to find out the values of various environment variables, which
        # isn't the case for the extensible SDK. Work around it for now.
        env = dict(os.environ)
        env.pop('OECORE_NATIVE_SYSROOT', '')
        exec_build_env_command(config.init_path, basepath,
                               'runqemu %s %s %s' % (machine, imagename, " ".join(args.args)),
                               watch=True, env=env)
    except bb.process.ExecutionError as e:
        # We've already seen the output since watch=True, so just ensure we return something to the user
        return e.exitcode
    return 0
def build(args, config, basepath, workspace):
    """Entry point for the devtool 'build' subcommand"""
    workspacepn = check_workspace_recipe(workspace, args.recipename, bbclassextend=True)
    build_tasks = _get_build_tasks(config)

    bbappend = workspace[workspacepn]['bbappend']
    if args.disable_parallel_make:
        logger.info("Disabling 'make' parallelism")
        _set_file_values(bbappend, {'PARALLEL_MAKE': ''})
    try:
        # Native recipes have no packaging tasks, so skip those
        targets = ['%s:%s' % (args.recipename, task)
                   for task in build_tasks
                   if not (args.recipename.endswith('-native') and 'package' in task)]
        exec_build_env_command(config.init_path, basepath,
                               'bitbake %s' % ' '.join(targets), watch=True)
    except bb.process.ExecutionError as e:
        # We've already seen the output since watch=True, so just ensure we return something to the user
        return e.exitcode
    finally:
        # Always undo the PARALLEL_MAKE override
        if args.disable_parallel_make:
            _set_file_values(bbappend, {'PARALLEL_MAKE': None})
    return 0
def reset(args, config, basepath, workspace):
    """Entry point for the devtool 'reset' subcommand"""
    import bb.utils
    if not args.recipename in workspace:
        logger.error("no recipe named %s in your workspace" % args.recipename)
        return -1
    if not args.no_clean:
        logger.info('Cleaning sysroot for recipe %s...' % args.recipename)
        exec_build_env_command(config.init_path, basepath,
                               'bitbake -c clean %s' % args.recipename)

    _check_preserve(config, args.recipename)

    preservepath = os.path.join(config.workspace_path, 'attic', args.recipename)
    def preservedir(origdir):
        # Move leftover files into the attic rather than deleting them
        if os.path.exists(origdir):
            for fn in os.listdir(origdir):
                # Fix: logger.warn is a deprecated alias of logger.warning
                logger.warning('Preserving %s in %s' % (fn, preservepath))
                bb.utils.mkdirhier(preservepath)
                shutil.move(os.path.join(origdir, fn), os.path.join(preservepath, fn))
            os.rmdir(origdir)

    preservedir(os.path.join(config.workspace_path, 'recipes', args.recipename))
    # We don't automatically create this dir next to appends, but the user can
    preservedir(os.path.join(config.workspace_path, 'appends', args.recipename))
    return 0
def build(args, config, basepath, workspace):
    """Entry point for the devtool 'build' subcommand"""
    if args.recipename not in workspace:
        raise DevtoolError("no recipe named %s in your workspace" % args.recipename)
    build_task = config.get("Build", "build_task", "populate_sysroot")

    # Optionally force single-threaded make via a bitbake postfile (-R)
    postfile = ""
    postfile_param = ""
    if args.disable_parallel_make:
        logger.info("Disabling 'make' parallelism")
        postfile = os.path.join(basepath, "conf", "disable_parallelism.conf")
        _create_conf_file({"PARALLEL_MAKE": ""}, postfile)
        postfile_param = "-R %s" % postfile
    try:
        cmdline = "bitbake -c %s %s %s" % (build_task, postfile_param, args.recipename)
        exec_build_env_command(config.init_path, basepath, cmdline, watch=True)
    except bb.process.ExecutionError as e:
        # We've already seen the output since watch=True, so just ensure we return something to the user
        return e.exitcode
    finally:
        # Remove the temporary postfile whatever happened
        if postfile:
            logger.debug("Removing postfile")
            os.remove(postfile)
    return 0
def build(args, config, basepath, workspace):
    """Entry point for the devtool 'build' subcommand"""
    if args.recipename not in workspace:
        raise DevtoolError("no recipe named %s in your workspace" % args.recipename)

    build_task = config.get('Build', 'build_task', 'populate_sysroot')
    postfile = ""
    postfile_param = ""
    if args.disable_parallel_make:
        # Write a temporary postfile blanking PARALLEL_MAKE and hand it
        # to bitbake via -R
        logger.info("Disabling 'make' parallelism")
        postfile = os.path.join(basepath, 'conf', 'disable_parallelism.conf')
        _create_conf_file({'PARALLEL_MAKE': ''}, postfile)
        postfile_param = "-R %s" % postfile
    try:
        exec_build_env_command(config.init_path, basepath,
                               'bitbake -c %s %s %s' % (build_task, postfile_param, args.recipename),
                               watch=True)
    except bb.process.ExecutionError as e:
        # We've already seen the output since watch=True, so just ensure we return something to the user
        return e.exitcode
    finally:
        # Clean up the temporary postfile on every path
        if postfile:
            logger.debug('Removing postfile')
            os.remove(postfile)
    return 0
def build(args, config, basepath, workspace):
    """Entry point for the devtool 'build' subcommand"""
    import bb
    if args.recipename not in workspace:
        logger.error("no recipe named %s in your workspace" % args.recipename)
        return -1
    # Run the install task for the workspace recipe
    exec_build_env_command(config.init_path, basepath,
                           'bitbake -c install %s' % args.recipename,
                           watch=True)
    return 0
def build(args, config, basepath, workspace):
    """Entry point for the devtool 'build' subcommand"""
    import bb
    if args.recipename not in workspace:
        logger.error("no recipe named %s in your workspace" % args.recipename)
        return -1
    # The task defaults to populate_sysroot but can be overridden via config
    task = config.get('Build', 'build_task', 'populate_sysroot')
    exec_build_env_command(config.init_path, basepath,
                           'bitbake -c %s %s' % (task, args.recipename),
                           watch=True)
    return 0
def menuconfig(args, config, basepath, workspace):
    """Entry point for the devtool 'menuconfig' subcommand"""
    # Fix: dropped the dead "" pre-initializers, the unused kconfigpath
    # variable and the unused 'res =' assignment
    tinfoil = setup_tinfoil(basepath=basepath)
    try:
        rd = parse_recipe(config, tinfoil, args.component, appends=True, filter_workspace=False)
        if not rd:
            return 1
        check_workspace_recipe(workspace, args.component)
        pn = rd.getVar('PN', True)
        if not rd.getVarFlag('do_menuconfig', 'task'):
            raise DevtoolError(
                "This recipe does not support menuconfig option")
        workspace_dir = os.path.join(config.workspace_path, 'sources')
        pn_src = os.path.join(workspace_dir, pn)
        # add check to see if oe_local_files exists or not
        localfilesdir = os.path.join(pn_src, 'oe-local-files')
        if not os.path.exists(localfilesdir):
            bb.utils.mkdirhier(localfilesdir)
            # Add gitignore to ensure source tree is clean
            gitignorefile = os.path.join(localfilesdir, '.gitignore')
            with open(gitignorefile, 'w') as f:
                f.write('# Ignore local files, by default. Remove this file if you want to commit the directory to Git\n')
                f.write('*\n')
    finally:
        tinfoil.shutdown()

    logger.info('Launching menuconfig')
    exec_build_env_command(config.init_path, basepath,
                           'bitbake -c menuconfig %s' % pn, watch=True)
    # Capture the config changes the user made as a fragment in the
    # recipe's local files directory
    fragment = os.path.join(localfilesdir, 'devtool-fragment.cfg')
    standard._create_kconfig_diff(pn_src, rd, fragment)
    return 0
def reset(args, config, basepath, workspace):
    """Entry point for the devtool 'reset' subcommand"""
    import bb
    # Validate the recipename / --all combination up front
    if args.recipename:
        if args.all:
            raise DevtoolError(
                "Recipe cannot be specified if -a/--all is used")
        elif not args.recipename in workspace:
            raise DevtoolError("no recipe named %s in your workspace" %
                               args.recipename)
    elif not args.all:
        raise DevtoolError("Recipe must be specified, or specify -a/--all to "
                           "reset all recipes")
    if args.all:
        recipes = workspace
    else:
        recipes = [args.recipename]

    for pn in recipes:
        if not args.no_clean:
            # Clean the sysroot so stale build output doesn't linger
            logger.info('Cleaning sysroot for recipe %s...' % pn)
            try:
                exec_build_env_command(config.init_path, basepath,
                                       'bitbake -c clean %s' % pn)
            except bb.process.ExecutionError as e:
                raise DevtoolError(
                    'Command \'%s\' failed, output:\n%s\nIf you '
                    'wish, you may specify -n/--no-clean to '
                    'skip running this command when resetting' %
                    (e.command, e.stdout))

        _check_preserve(config, pn)

        preservepath = os.path.join(config.workspace_path, 'attic', pn)
        def preservedir(origdir):
            # Move any remaining files to the attic instead of deleting them
            if os.path.exists(origdir):
                for root, dirs, files in os.walk(origdir):
                    for fn in files:
                        logger.warn('Preserving %s in %s' % (fn, preservepath))
                        bb.utils.mkdirhier(preservepath)
                        # NOTE(review): joins origdir rather than root, so
                        # files in nested subdirectories would resolve to a
                        # wrong source path - looks like this assumes a flat
                        # directory; confirm
                        shutil.move(os.path.join(origdir, fn),
                                    os.path.join(preservepath, fn))
                    # NOTE(review): top-down walk rmdirs subdirs that must
                    # already be empty - verify against callers
                    for dn in dirs:
                        os.rmdir(os.path.join(root, dn))
                os.rmdir(origdir)

        preservedir(os.path.join(config.workspace_path, 'recipes', pn))
        # We don't automatically create this dir next to appends, but the user can
        preservedir(os.path.join(config.workspace_path, 'appends', pn))

    return 0
def build(args, config, basepath, workspace):
    """Entry point for the devtool 'build' subcommand"""
    import bb
    if args.recipename not in workspace:
        logger.error("no recipe named %s in your workspace" % args.recipename)
        return -1
    task = config.get('Build', 'build_task', 'populate_sysroot')
    try:
        exec_build_env_command(config.init_path, basepath,
                               'bitbake -c %s %s' % (task, args.recipename),
                               watch=True)
    except bb.process.ExecutionError as e:
        # Output was already streamed to the user (watch=True); return its code
        return e.exitcode
    return 0
def build(args, config, basepath, workspace):
    """Entry point for the devtool 'build' subcommand"""
    import bb
    if args.recipename not in workspace:
        logger.error("no recipe named %s in your workspace" % args.recipename)
        return -1
    build_task = config.get('Build', 'build_task', 'populate_sysroot')
    cmdline = 'bitbake -c %s %s' % (build_task, args.recipename)
    try:
        exec_build_env_command(config.init_path, basepath, cmdline, watch=True)
    except bb.process.ExecutionError as e:
        # The user has already seen the failure output (watch=True)
        return e.exitcode
    return 0
def reset(args, config, basepath, workspace):
    """Entry point for the devtool 'reset' subcommand"""
    import bb
    if args.recipename:
        if args.all:
            raise DevtoolError("Recipe cannot be specified if -a/--all is used")
        else:
            check_workspace_recipe(workspace, args.recipename, checksrc=False)
    elif not args.all:
        raise DevtoolError("Recipe must be specified, or specify -a/--all to "
                           "reset all recipes")
    if args.all:
        # Fix: dict views aren't subscriptable in Python 3 and recipes[0]
        # is used below - materialise as a list
        recipes = list(workspace.keys())
    else:
        recipes = [args.recipename]

    if recipes and not args.no_clean:
        if len(recipes) == 1:
            logger.info('Cleaning sysroot for recipe %s...' % recipes[0])
        else:
            logger.info('Cleaning sysroot for recipes %s...' % ', '.join(recipes))
        # If the recipe file itself was deleted/renamed, this will fail
        try:
            exec_build_env_command(config.init_path, basepath,
                                   'bitbake -c clean %s' % ' '.join(recipes))
        except bb.process.ExecutionError as e:
            raise DevtoolError('Command \'%s\' failed, output:\n%s\nIf you '
                               'wish, you may specify -n/--no-clean to '
                               'skip running this command when resetting' %
                               (e.command, e.stdout))

    for pn in recipes:
        _check_preserve(config, pn)

        preservepath = os.path.join(config.workspace_path, 'attic', pn)
        def preservedir(origdir):
            # Preserve any leftover files in the attic rather than deleting
            if os.path.exists(origdir):
                for root, dirs, files in os.walk(origdir):
                    for fn in files:
                        # Fix: logger.warn is a deprecated alias of warning()
                        logger.warning('Preserving %s in %s' % (fn, preservepath))
                        # NOTE(review): joins origdir (not root), so this
                        # assumes a flat directory - confirm for nested trees
                        _move_file(os.path.join(origdir, fn),
                                   os.path.join(preservepath, fn))
                    for dn in dirs:
                        os.rmdir(os.path.join(root, dn))
                os.rmdir(origdir)

        preservedir(os.path.join(config.workspace_path, 'recipes', pn))
        # We don't automatically create this dir next to appends, but the user can
        preservedir(os.path.join(config.workspace_path, 'appends', pn))
    return 0
def build(args, config, basepath, workspace):
    """Entry point for the devtool 'build' subcommand"""
    workspacepn = check_workspace_recipe(workspace, args.recipename, bbclassextend=True)

    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False)
        if not rd:
            return 1
        # Does the recipe define a do_deploy task?
        deploytask = 'do_deploy' in rd.getVar('__BBTASKS')
    finally:
        tinfoil.shutdown()

    if args.clean:
        # use clean instead of cleansstate to avoid messing things up in eSDK
        build_tasks = ['do_clean']
    else:
        build_tasks = _get_build_tasks(config)
        if deploytask:
            build_tasks.append('do_deploy')

    bbappend = workspace[workspacepn]['bbappend']
    if args.disable_parallel_make:
        logger.info("Disabling 'make' parallelism")
        _set_file_values(bbappend, {'PARALLEL_MAKE': ''})
    try:
        # Native recipes have no packaging tasks
        bbargs = ['%s:%s' % (args.recipename, task)
                  for task in build_tasks
                  if not (args.recipename.endswith('-native') and 'package' in task)]
        exec_build_env_command(config.init_path, basepath,
                               'bitbake %s' % ' '.join(bbargs), watch=True)
    except bb.process.ExecutionError as e:
        # We've already seen the output since watch=True, so just ensure we return something to the user
        return e.exitcode
    finally:
        if args.disable_parallel_make:
            _set_file_values(bbappend, {'PARALLEL_MAKE': None})
    return 0
def deploy(args, config, basepath, workspace):
    """Entry point for the devtool 'deploy' subcommand"""
    import re
    from devtool import exec_build_env_command

    if not args.recipename in workspace:
        logger.error("no recipe named %s in your workspace" % args.recipename)
        return -1
    # Split an optional "host:destdir" target specification
    try:
        host, destdir = args.target.split(':')
    except ValueError:
        destdir = '/'
    else:
        args.target = host

    deploy_dir = os.path.join(basepath, 'target_deploy', args.target)
    deploy_file = os.path.join(deploy_dir, args.recipename + '.list')

    # Query bitbake for the recipe's install destination (D)
    stdout, stderr = exec_build_env_command(config.init_path, basepath,
                                            'bitbake -e %s' % args.recipename,
                                            shell=True)
    # NOTE(review): if D= is absent from the output, re.search returns None
    # and .group(1) raises AttributeError - confirm whether that can happen
    recipe_outdir = re.search(r'^D="(.*)"', stdout, re.MULTILINE).group(1)
    if not os.path.exists(recipe_outdir) or not os.listdir(recipe_outdir):
        logger.error('No files to deploy - have you built the %s recipe? If so, the install step has not installed any files.' % args.recipename)
        return -1

    if args.dry_run:
        # Only list what would be deployed; make no changes
        print('Files to be deployed for %s on target %s:' % (args.recipename, args.target))
        for root, dirs, files in os.walk(recipe_outdir):
            for fn in files:
                print(' %s' % os.path.join(destdir, os.path.relpath(root, recipe_outdir), fn))
        return 0

    if os.path.exists(deploy_file):
        # Remove a previous deployment of this recipe first
        if undeploy(args, config, basepath, workspace):
            # Error already shown
            return -1

    extraoptions = ''
    if args.no_host_check:
        extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
    if not args.show_status:
        extraoptions += ' -q'
    # NOTE(review): shell string with unquoted paths - spaces or shell
    # metacharacters in recipe_outdir/destdir would break this; confirm
    ret = subprocess.call('scp -r %s %s/* %s:%s' % (extraoptions, recipe_outdir, args.target, destdir), shell=True)
    if ret != 0:
        logger.error('Deploy failed - rerun with -s to get a complete error message')
        return ret

    logger.info('Successfully deployed %s' % recipe_outdir)

    if not os.path.exists(deploy_dir):
        os.makedirs(deploy_dir)

    # Record the deployed file list so undeploy can remove them later
    files_list = []
    for root, _, files in os.walk(recipe_outdir):
        for filename in files:
            filename = os.path.relpath(os.path.join(root, filename), recipe_outdir)
            files_list.append(os.path.join(destdir, filename))

    with open(deploy_file, 'w') as fobj:
        fobj.write('\n'.join(files_list))

    return 0
def add(args, config, basepath, workspace):
    """Entry point for the devtool 'add' subcommand"""
    import bb
    import oe.recipeutils

    if args.recipename in workspace:
        logger.error("recipe %s is already in your workspace" % args.recipename)
        return -1
    reason = oe.recipeutils.validate_pn(args.recipename)
    if reason:
        logger.error(reason)
        return -1

    srctree = os.path.abspath(args.srctree)
    appendpath = os.path.join(config.workspace_path, "appends")
    if not os.path.exists(appendpath):
        os.makedirs(appendpath)

    recipedir = os.path.join(config.workspace_path, "recipes", args.recipename)
    bb.utils.mkdirhier(recipedir)
    if args.version:
        if "_" in args.version or " " in args.version:
            logger.error('Invalid version string "%s"' % args.version)
            return -1
        bp = "%s_%s" % (args.recipename, args.version)
    else:
        bp = args.recipename
    recipefile = os.path.join(recipedir, "%s.bb" % bp)
    # Keep color output when attached to a terminal
    if sys.stdout.isatty():
        color = "always"
    else:
        color = args.color
    stdout, stderr = exec_build_env_command(
        config.init_path, basepath,
        "recipetool --color=%s create -o %s %s" % (color, recipefile, srctree)
    )
    logger.info(
        "Recipe %s has been automatically created; further editing may be required to make it fully functional"
        % recipefile
    )

    _add_md5(config, args.recipename, recipefile)

    initial_rev = None
    if os.path.exists(os.path.join(srctree, ".git")):
        # Record HEAD so later update-recipe runs can diff against it
        (stdout, _) = bb.process.run("git rev-parse HEAD", cwd=srctree)
        initial_rev = stdout.rstrip()

    # Fix: the bbappend must be named after the recipe file (including any
    # version), otherwise foo.bbappend is never applied to foo_1.0.bb
    appendfile = os.path.join(appendpath, "%s.bbappend" % bp)
    with open(appendfile, "w") as f:
        f.write("inherit externalsrc\n")
        f.write('EXTERNALSRC = "%s"\n' % srctree)
        if args.same_dir:
            f.write('EXTERNALSRC_BUILD = "%s"\n' % srctree)
        if initial_rev:
            f.write("\n# initial_rev: %s\n" % initial_rev)

    _add_md5(config, args.recipename, appendfile)
    return 0
def add(args, config, basepath, workspace):
    """Entry point for the devtool 'add' subcommand"""
    import bb
    import oe.recipeutils

    if args.recipename in workspace:
        logger.error("recipe %s is already in your workspace" % args.recipename)
        return -1
    reason = oe.recipeutils.validate_pn(args.recipename)
    if reason:
        logger.error(reason)
        return -1

    srctree = os.path.abspath(args.srctree)
    appendpath = os.path.join(config.workspace_path, 'appends')
    if not os.path.exists(appendpath):
        os.makedirs(appendpath)

    recipedir = os.path.join(config.workspace_path, 'recipes', args.recipename)
    bb.utils.mkdirhier(recipedir)
    if args.version:
        if '_' in args.version or ' ' in args.version:
            logger.error('Invalid version string "%s"' % args.version)
            return -1
        bp = "%s_%s" % (args.recipename, args.version)
    else:
        bp = args.recipename
    recipefile = os.path.join(recipedir, "%s.bb" % bp)
    # Keep color output when attached to a terminal
    if sys.stdout.isatty():
        color = 'always'
    else:
        color = args.color
    stdout, stderr = exec_build_env_command(
        config.init_path, basepath,
        'recipetool --color=%s create -o %s %s' % (color, recipefile, srctree))
    logger.info(
        'Recipe %s has been automatically created; further editing may be required to make it fully functional'
        % recipefile)

    _add_md5(config, args.recipename, recipefile)

    initial_rev = None
    if os.path.exists(os.path.join(srctree, '.git')):
        # Record HEAD so later update-recipe runs can diff against it
        (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
        initial_rev = stdout.rstrip()

    # Fix: the bbappend must be named after the recipe file (including any
    # version), otherwise foo.bbappend is never applied to foo_1.0.bb
    appendfile = os.path.join(appendpath, '%s.bbappend' % bp)
    with open(appendfile, 'w') as f:
        f.write('inherit externalsrc\n')
        f.write('EXTERNALSRC = "%s"\n' % srctree)
        if args.same_dir:
            f.write('EXTERNALSRC_BUILD = "%s"\n' % srctree)
        if initial_rev:
            f.write('\n# initial_rev: %s\n' % initial_rev)

    _add_md5(config, args.recipename, appendfile)
    return 0
def add(args, config, basepath, workspace):
    """Entry point for the devtool 'add' subcommand"""
    import bb
    import oe.recipeutils

    # Refuse duplicates and invalid recipe names up front
    if args.recipename in workspace:
        logger.error("recipe %s is already in your workspace" % args.recipename)
        return -1
    reason = oe.recipeutils.validate_pn(args.recipename)
    if reason:
        logger.error(reason)
        return -1

    srctree = os.path.abspath(args.srctree)
    appendpath = os.path.join(config.workspace_path, 'appends')
    if not os.path.exists(appendpath):
        os.makedirs(appendpath)

    recipedir = os.path.join(config.workspace_path, 'recipes', args.recipename)
    bb.utils.mkdirhier(recipedir)
    if args.version:
        # Versions must be clean: no underscores (reserved as the PN/PV
        # separator in the file name) and no spaces
        if '_' in args.version or ' ' in args.version:
            logger.error('Invalid version string "%s"' % args.version)
            return -1
        bp = "%s_%s" % (args.recipename, args.version)
    else:
        bp = args.recipename
    recipefile = os.path.join(recipedir, "%s.bb" % bp)
    # Keep color output when attached to a terminal
    if sys.stdout.isatty():
        color = 'always'
    else:
        color = args.color
    try:
        # Let recipetool generate a skeleton recipe from the source tree
        stdout, stderr = exec_build_env_command(config.init_path, basepath,
                                                'recipetool --color=%s create -o %s "%s"' % (color, recipefile, srctree))
        logger.info('Recipe %s has been automatically created; further editing may be required to make it fully functional' % recipefile)
    except bb.process.ExecutionError as e:
        logger.error('Command \'%s\' failed:\n%s' % (e.command, e.stdout))
        return 1

    _add_md5(config, args.recipename, recipefile)

    initial_rev = None
    if os.path.exists(os.path.join(srctree, '.git')):
        # Record the current HEAD so later updates can diff against it
        (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
        initial_rev = stdout.rstrip()

    # The bbappend is named after the recipe file (including version) so
    # that bitbake applies it to the generated recipe
    appendfile = os.path.join(appendpath, '%s.bbappend' % bp)
    with open(appendfile, 'w') as f:
        f.write('inherit externalsrc\n')
        f.write('EXTERNALSRC = "%s"\n' % srctree)
        if args.same_dir:
            f.write('EXTERNALSRC_BUILD = "%s"\n' % srctree)
        if initial_rev:
            f.write('\n# initial_rev: %s\n' % initial_rev)

    _add_md5(config, args.recipename, appendfile)
    return 0
def reset(args, config, basepath, workspace):
    """Entry point for the devtool 'reset' subcommand"""
    import bb.utils
    if args.recipename:
        if args.all:
            logger.error("Recipe cannot be specified if -a/--all is used")
            return -1
        elif not args.recipename in workspace:
            logger.error("no recipe named %s in your workspace" % args.recipename)
            return -1
    elif not args.all:
        logger.error("Recipe must be specified, or specify -a/--all to reset all recipes")
        return -1

    if args.all:
        recipes = workspace
    else:
        recipes = [args.recipename]

    for pn in recipes:
        if not args.no_clean:
            logger.info('Cleaning sysroot for recipe %s...' % pn)
            exec_build_env_command(config.init_path, basepath,
                                   'bitbake -c clean %s' % pn)

        _check_preserve(config, pn)

        preservepath = os.path.join(config.workspace_path, 'attic', pn)
        def preservedir(origdir):
            # Keep stray files in the attic instead of deleting them
            if os.path.exists(origdir):
                for fn in os.listdir(origdir):
                    # Fix: logger.warn is a deprecated alias of warning()
                    logger.warning('Preserving %s in %s' % (fn, preservepath))
                    bb.utils.mkdirhier(preservepath)
                    shutil.move(os.path.join(origdir, fn), os.path.join(preservepath, fn))
                os.rmdir(origdir)

        preservedir(os.path.join(config.workspace_path, 'recipes', pn))
        # We don't automatically create this dir next to appends, but the user can
        preservedir(os.path.join(config.workspace_path, 'appends', pn))
    return 0
def build_image(args, config, basepath, workspace):
    """Entry point for the devtool 'build-image' subcommand."""
    image = args.recipe
    appendfile = os.path.join(config.workspace_path, 'appends',
                              '%s.bbappend' % image)
    # Pull every workspace recipe into the image via a bbappend
    with open(appendfile, 'w') as afile:
        afile.write('IMAGE_INSTALL_append = " %s"\n' % ' '.join(workspace.keys()))

    add_md5(config, image, appendfile)

    try:
        exec_build_env_command(config.init_path, basepath,
                               'bitbake %s' % image, watch=True)
    except ExecutionError as err:
        # Build output was already streamed; surface the exit code
        return err.exitcode

    LOG.info('Successfully built %s', image)
def kernel_menuconfig(args, config, basepath, workspace): """Entry point for the devtool 'kernel-menuconfig' subcommand""" # FIXME we end up with a triple parse here which is ugly (one for # the initial tinfoil instantiation, one for the modify, and then # finally one for the call to bitbake). Unfortunately it's # unavoidable without significant refactoring though so that will # have to wait until next release. tinfoil = setup_tinfoil(basepath=basepath) try: tinfoil.prepare(config_only=False) rd = parse_recipe(config, tinfoil, 'virtual/kernel', appends=True, filter_workspace=False) if not rd: return 1 pn = rd.getVar('PN', True) # We need to do this carefully as the version will change as a result of running devtool modify ver = rd.expand('${EXTENDPE}${PV}-${PR}') taintfn = (rd.getVar('STAMP', True) + '.do_compile.taint').replace(ver, '*') finally: tinfoil.shutdown() if not pn in workspace: # FIXME this will break if any options are added to the modify # subcommand. margs = argparse.Namespace() margs.recipename = pn margs.srctree = None margs.wildcard = False margs.extract = True margs.no_extract = False margs.same_dir = False margs.no_same_dir = False margs.branch = 'devtool' standard.modify(margs, config, basepath, workspace) exec_build_env_command(config.init_path, basepath, 'bitbake -c menuconfig %s' % pn, watch=True) # Remove taint created by do_menuconfig, if any for fn in glob.glob(taintfn): os.remove(fn) return 0
def reset(args, config, basepath, workspace):
    """Entry point for the devtool 'reset' subcommand"""
    import bb
    if args.recipename:
        if args.all:
            logger.error("Recipe cannot be specified if -a/--all is used")
            return -1
        elif not args.recipename in workspace:
            logger.error("no recipe named %s in your workspace" % args.recipename)
            return -1
    elif not args.all:
        logger.error("Recipe must be specified, or specify -a/--all to reset all recipes")
        return -1

    if args.all:
        recipes = workspace
    else:
        recipes = [args.recipename]

    for pn in recipes:
        if not args.no_clean:
            logger.info('Cleaning sysroot for recipe %s...' % pn)
            try:
                exec_build_env_command(config.init_path, basepath,
                                       'bitbake -c clean %s' % pn)
            except bb.process.ExecutionError as e:
                logger.error('Command \'%s\' failed, output:\n%s\nIf you wish, you may specify -n/--no-clean to skip running this command when resetting' % (e.command, e.stdout))
                return 1

        _check_preserve(config, pn)

        preservepath = os.path.join(config.workspace_path, 'attic', pn)
        def preservedir(origdir):
            # Move leftover files to the attic rather than deleting them
            if os.path.exists(origdir):
                for fn in os.listdir(origdir):
                    # Fix: logger.warn is a deprecated alias of warning()
                    logger.warning('Preserving %s in %s' % (fn, preservepath))
                    bb.utils.mkdirhier(preservepath)
                    shutil.move(os.path.join(origdir, fn), os.path.join(preservepath, fn))
                os.rmdir(origdir)

        preservedir(os.path.join(config.workspace_path, 'recipes', pn))
        # We don't automatically create this dir next to appends, but the user can
        preservedir(os.path.join(config.workspace_path, 'appends', pn))
    return 0
def reset(args, config, basepath, workspace):
    """Entry point for the devtool 'reset' subcommand"""
    import bb.utils
    if args.recipename:
        if args.all:
            logger.error("Recipe cannot be specified if -a/--all is used")
            return -1
        elif not args.recipename in workspace:
            logger.error("no recipe named %s in your workspace" % args.recipename)
            return -1
    elif not args.all:
        logger.error("Recipe must be specified, or specify -a/--all to reset all recipes")
        return -1

    if args.all:
        recipes = workspace
    else:
        recipes = [args.recipename]

    for pn in recipes:
        if not args.no_clean:
            logger.info("Cleaning sysroot for recipe %s..." % pn)
            exec_build_env_command(config.init_path, basepath,
                                   "bitbake -c clean %s" % pn)

        _check_preserve(config, pn)

        preservepath = os.path.join(config.workspace_path, "attic", pn)
        def preservedir(origdir):
            # Preserve stray files in the attic instead of deleting them
            if os.path.exists(origdir):
                for fn in os.listdir(origdir):
                    # Fix: logger.warn is a deprecated alias of warning()
                    logger.warning("Preserving %s in %s" % (fn, preservepath))
                    bb.utils.mkdirhier(preservepath)
                    shutil.move(os.path.join(origdir, fn), os.path.join(preservepath, fn))
                os.rmdir(origdir)

        preservedir(os.path.join(config.workspace_path, "recipes", pn))
        # We don't automatically create this dir next to appends, but the user can
        preservedir(os.path.join(config.workspace_path, "appends", pn))
    return 0
def build_image(args, config, basepath, workspace):
    """Entry point for the devtool 'build-image' subcommand."""
    image = args.recipe
    appendfile = os.path.join(config.workspace_path, 'appends', '%s.bbappend' % image)
    # remove <image>.bbapend to make sure setup_tinfoil doesn't
    # breake because of it
    if os.path.isfile(appendfile):
        os.unlink(appendfile)

    recipes = _get_recipes(workspace, config)
    if recipes:
        with open(appendfile, 'w') as afile:
            # include selected recipes into the image
            afile.write('IMAGE_INSTALL_append = " %s"\n' % ' '.join(recipes))
            # Generate notification callback devtool_warn_image_extended
            # (the %% below escape % inside the generated python function,
            # which itself uses % formatting at image build time)
            afile.write('do_rootfs[prefuncs] += "devtool_warn_image_extended"\n\n')
            afile.write("python devtool_warn_image_extended() {\n")
            afile.write(" bb.plain('NOTE: %%s: building with additional '\n"
                        " 'packages due to \"devtool build-image\"'"
                        " %% d.getVar('PN', True))\n"
                        " bb.plain('NOTE: delete %%s to clear this' %% \\\n"
                        " '%s')\n" % os.path.relpath(appendfile, basepath))
            afile.write("}\n")
        logger.info('Building image %s with the following '
                    'additional packages: %s', image, ' '.join(recipes))
    else:
        logger.warning('No recipes in workspace, building image %s unmodified', image)

    # run bitbake to build image
    try:
        exec_build_env_command(config.init_path, basepath,
                               'bitbake %s' % image, watch=True)
    except ExecutionError as err:
        return err.exitcode

    logger.info('Successfully built %s', image)
def build(args, config, basepath, workspace):
    """Entry point for the devtool 'build' subcommand"""
    check_workspace_recipe(workspace, args.recipename)
    task = _get_build_task(config)

    bbappend = workspace[args.recipename]["bbappend"]
    parallel_disabled = args.disable_parallel_make
    if parallel_disabled:
        logger.info("Disabling 'make' parallelism")
        _set_file_values(bbappend, {"PARALLEL_MAKE": ""})
    try:
        exec_build_env_command(config.init_path, basepath,
                               "bitbake -c %s %s" % (task, args.recipename),
                               watch=True)
    except bb.process.ExecutionError as e:
        # Output already streamed (watch=True); just return the exit code
        return e.exitcode
    finally:
        # Restore PARALLEL_MAKE in the bbappend if we blanked it above
        if parallel_disabled:
            _set_file_values(bbappend, {"PARALLEL_MAKE": None})
    return 0
def package(args, config, basepath, workspace):
    """Entry point for the devtool 'package' subcommand.

    Runs the packaging task for a workspace recipe and reports where the
    resulting packages were written. Returns 0 on success or the bitbake
    exit code on failure.
    """
    if args.recipename not in workspace:
        raise DevtoolError("no recipe named %s in your workspace" % args.recipename)

    # Prefer the package type from the devtool config; fall back to the
    # distro's IMAGE_PKGTYPE if it isn't set there
    pkgtype = config.get('Package', 'image_pkgtype', '')
    if not pkgtype:
        tinfoil = setup_tinfoil()
        try:
            tinfoil.prepare(config_only=True)
            pkgtype = tinfoil.config_data.getVar('IMAGE_PKGTYPE', True)
        finally:
            tinfoil.shutdown()

    task = config.get('Package', 'package_task', 'package_write_%s' % pkgtype)
    try:
        exec_build_env_command(config.init_path, basepath,
                               'bitbake -c %s %s' % (task, args.recipename),
                               watch=True)
    except bb.process.ExecutionError as e:
        # Output already streamed (watch=True); propagate the exit code only
        return e.exitcode

    logger.info('Your packages are in %s/tmp/deploy/%s' % (basepath, pkgtype))
    return 0
def build(args, config, basepath, workspace):
    """Entry point for the devtool 'build' subcommand.

    Executes the build task for the given workspace recipe, temporarily
    forcing PARALLEL_MAKE empty when --disable-parallel-make is given.
    """
    check_workspace_recipe(workspace, args.recipename)
    task_name = _get_build_task(config)
    append_file = workspace[args.recipename]['bbappend']

    if args.disable_parallel_make:
        logger.info("Disabling 'make' parallelism")
        _set_file_values(append_file, {'PARALLEL_MAKE': ''})

    rc = 0
    try:
        exec_build_env_command(config.init_path, basepath,
                               'bitbake -c %s %s' % (task_name, args.recipename),
                               watch=True)
    except bb.process.ExecutionError as e:
        # watch=True already streamed the output; report the exit code only
        rc = e.exitcode
    finally:
        if args.disable_parallel_make:
            # Undo the temporary PARALLEL_MAKE override
            _set_file_values(append_file, {'PARALLEL_MAKE': None})
    return rc
def build(args, config, basepath, workspace):
    """Entry point for the devtool 'build' subcommand.

    Builds a workspace recipe, optionally passing a temporary bitbake
    postfile (-R) that disables make parallelism. The postfile is always
    cleaned up afterwards.
    """
    check_workspace_recipe(workspace, args.recipename)
    task = config.get('Build', 'build_task', 'populate_sysroot')

    postfile = ""
    postfile_param = ""
    if args.disable_parallel_make:
        logger.info("Disabling 'make' parallelism")
        postfile = os.path.join(basepath, 'conf', 'disable_parallelism.conf')
        _create_conf_file({'PARALLEL_MAKE': ''}, postfile)
        postfile_param = "-R %s" % postfile

    try:
        cmdline = 'bitbake -c %s %s %s' % (task, postfile_param, args.recipename)
        exec_build_env_command(config.init_path, basepath, cmdline, watch=True)
    except bb.process.ExecutionError as e:
        # Output already streamed (watch=True); propagate the exit code only
        return e.exitcode
    finally:
        if postfile:
            logger.debug('Removing postfile')
            os.remove(postfile)
    return 0
def _prep_extract_operation(config, basepath, recipename):
    """HACK: Ugly workaround for making sure that requirements are met when
    trying to extract a package. Returns the tinfoil instance to be used."""
    tinfoil = setup_tinfoil(basepath=basepath)
    recipe_data = parse_recipe(config, tinfoil, recipename, True)

    if not bb.data.inherits_class('kernel-yocto', recipe_data):
        return tinfoil

    # kernel-yocto recipes need kern-tools-native built first; release
    # tinfoil before invoking bitbake, then re-create it afterwards
    tinfoil.shutdown()
    try:
        exec_build_env_command(config.init_path, basepath,
                               'bitbake kern-tools-native')
        tinfoil = setup_tinfoil(basepath=basepath)
    except bb.process.ExecutionError as err:
        raise DevtoolError("Failed to build kern-tools-native:\n%s" % err.stdout)
    return tinfoil
def _prep_extract_operation(config, basepath, recipename):
    """HACK: Work around missing prerequisites when extracting a package.

    For kernel-yocto recipes, builds kern-tools-native first. Returns the
    tinfoil instance the caller should use afterwards.
    """
    tinfoil = setup_tinfoil()
    rd = parse_recipe(config, tinfoil, recipename, True)
    needs_kern_tools = bb.data.inherits_class('kernel-yocto', rd)
    if needs_kern_tools:
        # Release tinfoil before running bitbake, then re-create it
        tinfoil.shutdown()
        try:
            exec_build_env_command(config.init_path, basepath,
                                   'bitbake kern-tools-native')
            tinfoil = setup_tinfoil()
        except bb.process.ExecutionError as err:
            raise DevtoolError("Failed to build kern-tools-native:\n%s"
                               % err.stdout)
    return tinfoil
def deploy(args, config, basepath, workspace):
    """Deploy a workspace recipe's output files to a target machine via scp
    and record the list of deployed paths for a later undeploy."""
    import re
    from devtool import exec_build_env_command

    if args.recipename not in workspace:
        logger.error("no recipe named %s in your workspace" % args.recipename)
        return -1

    # Split an optional destination directory off the target ("host:dir");
    # with no ':' the whole target is the host and destdir defaults to '/'
    try:
        host, destdir = args.target.split(':')
    except ValueError:
        destdir = '/'
    else:
        args.target = host

    deploy_dir = os.path.join(basepath, 'target_deploy', args.target)
    deploy_file = os.path.join(deploy_dir, args.recipename + '.list')

    # Remove any previous deployment of this recipe first
    if os.path.exists(deploy_file):
        undeploy(args)

    # Ask bitbake for the recipe's image directory (${D})
    stdout, stderr = exec_build_env_command(config.init_path, basepath,
                                            'bitbake -e %s' % args.recipename,
                                            shell=True)
    recipe_outdir = re.search(r'^D="(.*)"', stdout, re.MULTILINE).group(1)

    ret = subprocess.call('scp -qr %s/* %s:%s' % (recipe_outdir, args.target, destdir),
                          shell=True)
    if ret != 0:
        return ret
    logger.info('Successfully deployed %s' % recipe_outdir)

    if not os.path.exists(deploy_dir):
        os.makedirs(deploy_dir)

    # Record every deployed file (relative to ${D}, rooted at destdir)
    deployed_paths = []
    for root, _, files in os.walk(recipe_outdir):
        for fname in files:
            relpath = os.path.relpath(os.path.join(root, fname), recipe_outdir)
            deployed_paths.append(os.path.join(destdir, relpath))
    with open(deploy_file, 'w') as fobj:
        fobj.write('\n'.join(deployed_paths))
    return 0
def sdk_update(args, config, basepath, workspace):
    """Entry point for devtool sdk-update command"""
    # Resolve the update server: command line first, then devtool config
    updateserver = args.updateserver
    if not updateserver:
        updateserver = config.get('SDK', 'updateserver', '')
    logger.debug("updateserver: %s" % updateserver)

    # Make sure we are using sdk-update from within SDK
    logger.debug("basepath = %s" % basepath)
    old_locked_sig_file_path = os.path.join(basepath, 'conf/locked-sigs.inc')
    if not os.path.exists(old_locked_sig_file_path):
        logger.error("Not using devtool's sdk-update command from within an extensible SDK. Please specify correct basepath via --basepath option")
        return -1
    else:
        logger.debug("Found conf/locked-sigs.inc in %s" % basepath)

    # This variant only supports URL update servers
    if not '://' in updateserver:
        logger.error("Update server must be a URL")
        return -1

    layers_dir = os.path.join(basepath, 'layers')
    conf_dir = os.path.join(basepath, 'conf')

    # Grab variable values
    tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
    try:
        stamps_dir = tinfoil.config_data.getVar('STAMPS_DIR')
        sstate_mirrors = tinfoil.config_data.getVar('SSTATE_MIRRORS')
        site_conf_version = tinfoil.config_data.getVar('SITE_CONF_VERSION')
    finally:
        tinfoil.shutdown()

    # Stage everything in a temp dir; only moved into place once all
    # downloads have succeeded
    tmpsdk_dir = tempfile.mkdtemp()
    try:
        os.makedirs(os.path.join(tmpsdk_dir, 'conf'))
        # NOTE(review): new_locked_sig_file_path appears unused in this
        # variant of the function
        new_locked_sig_file_path = os.path.join(tmpsdk_dir, 'conf', 'locked-sigs.inc')
        # Fetch manifest from server
        tmpmanifest = os.path.join(tmpsdk_dir, 'conf', 'sdk-conf-manifest')
        # NOTE(review): the wget return code is not checked here —
        # presumably check_manifest() copes with a missing/empty manifest;
        # verify
        ret = subprocess.call("wget -q -O %s %s/conf/sdk-conf-manifest" % (tmpmanifest, updateserver), shell=True)
        changedfiles = check_manifest(tmpmanifest, basepath)
        if not changedfiles:
            logger.info("Already up-to-date")
            return 0
        # Update metadata
        logger.debug("Updating metadata via git ...")
        #Check for the status before doing a fetch and reset
        if os.path.exists(os.path.join(basepath, 'layers/.git')):
            out = subprocess.check_output("git status --porcelain", shell=True, cwd=layers_dir)
            if not out:
                # Clean tree: hard-reset onto the upstream branch
                ret = subprocess.call("git fetch --all; git reset --hard @{u}", shell=True,
                                      cwd=layers_dir)
            else:
                # Refuse to clobber local modifications
                logger.error("Failed to update metadata as there have been changes made to it. Aborting.");
                logger.error("Changed files:\n%s" % out);
                return -1
        else:
            # No git checkout present; force the clone fallback below
            ret = -1
        if ret != 0:
            ret = subprocess.call("git clone %s/layers/.git" % updateserver, shell=True, cwd=tmpsdk_dir)
            if ret != 0:
                logger.error("Updating metadata via git failed")
                return ret
        logger.debug("Updating conf files ...")
        for changedfile in changedfiles:
            ret = subprocess.call("wget -q -O %s %s/%s" % (changedfile, updateserver, changedfile), shell=True, cwd=tmpsdk_dir)
            if ret != 0:
                logger.error("Updating %s failed" % changedfile)
                return ret
        # Check if UNINATIVE_CHECKSUM changed
        uninative = False
        if 'conf/local.conf' in changedfiles:
            def read_uninative_checksums(fn):
                # Parse UNINATIVE_CHECKSUM[<arch>] = "<sum>" lines out of a
                # local.conf, returning (arch, checksum) pairs
                chksumitems = []
                with open(fn, 'r') as f:
                    for line in f:
                        if line.startswith('UNINATIVE_CHECKSUM'):
                            splitline = re.split(r'[\[\]"\']', line)
                            if len(splitline) > 3:
                                chksumitems.append((splitline[1], splitline[3]))
                return chksumitems
            oldsums = read_uninative_checksums(os.path.join(basepath, 'conf/local.conf'))
            newsums = read_uninative_checksums(os.path.join(tmpsdk_dir, 'conf/local.conf'))
            if oldsums != newsums:
                # Checksums changed: fetch the new uninative tarballs too
                uninative = True
                for buildarch, chksum in newsums:
                    uninative_file = os.path.join('downloads', 'uninative', chksum, '%s-nativesdk-libc.tar.bz2' % buildarch)
                    mkdir(os.path.join(tmpsdk_dir, os.path.dirname(uninative_file)))
                    ret = subprocess.call("wget -q -O %s %s/%s" % (uninative_file, updateserver, uninative_file), shell=True, cwd=tmpsdk_dir)
        # Ok, all is well at this point - move everything over
        tmplayers_dir = os.path.join(tmpsdk_dir, 'layers')
        if os.path.exists(tmplayers_dir):
            shutil.rmtree(layers_dir)
            shutil.move(tmplayers_dir, layers_dir)
        for changedfile in changedfiles:
            destfile = os.path.join(basepath, changedfile)
            os.remove(destfile)
            shutil.move(os.path.join(tmpsdk_dir, changedfile), destfile)
        os.remove(os.path.join(conf_dir, 'sdk-conf-manifest'))
        shutil.move(tmpmanifest, conf_dir)
        if uninative:
            # Replace the whole uninative download area with the new one
            shutil.rmtree(os.path.join(basepath, 'downloads', 'uninative'))
            shutil.move(os.path.join(tmpsdk_dir, 'downloads', 'uninative'), os.path.join(basepath, 'downloads'))
        if not sstate_mirrors:
            # First update on this SDK: point sstate at the update server
            with open(os.path.join(conf_dir, 'site.conf'), 'a') as f:
                f.write('SCONF_VERSION = "%s"\n' % site_conf_version)
                f.write('SSTATE_MIRRORS_append = " file://.* %s/sstate-cache/PATH \\n "\n' % updateserver)
    finally:
        shutil.rmtree(tmpsdk_dir)

    if not args.skip_prepare:
        # Find all potentially updateable tasks
        sdk_update_targets = []
        tasks = ['do_populate_sysroot', 'do_packagedata']
        for root, _, files in os.walk(stamps_dir):
            for fn in files:
                if not '.sigdata.' in fn:
                    for task in tasks:
                        if '.%s.' % task in fn or '.%s_setscene.' % task in fn:
                            sdk_update_targets.append('%s:%s' % (os.path.basename(root), task))
        # Run bitbake command for the whole SDK
        logger.info("Preparing build system... (This may take some time.)")
        try:
            exec_build_env_command(config.init_path, basepath, 'bitbake --setscene-only %s' % ' '.join(sdk_update_targets), stderr=subprocess.STDOUT)
            # Dry-run to verify everything was restored from sstate: any
            # remaining "Running task" lines mean preparation failed
            output, _ = exec_build_env_command(config.init_path, basepath, 'bitbake -n %s' % ' '.join(sdk_update_targets), stderr=subprocess.STDOUT)
            runlines = []
            for line in output.splitlines():
                if 'Running task ' in line:
                    runlines.append(line)
            if runlines:
                logger.error('Unexecuted tasks found in preparation log:\n %s' % '\n '.join(runlines))
                return -1
        except bb.process.ExecutionError as e:
            logger.error('Preparation failed:\n%s' % e.stdout)
            return -1
    return 0
def configure_help(args, config, basepath, workspace):
    """Entry point for the devtool 'configure-help' subcommand"""
    import oe.utils
    check_workspace_recipe(workspace, args.recipename)
    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False)
        if not rd:
            return 1
        # Pull everything we need out of the datastore before shutting
        # tinfoil down
        b = rd.getVar('B')
        s = rd.getVar('S')
        configurescript = os.path.join(s, 'configure')
        confdisabled = 'noexec' in rd.getVarFlags('do_configure') or 'do_configure' not in (rd.getVar('__BBTASKS', False) or [])
        configureopts = oe.utils.squashspaces(rd.getVar('CONFIGUREOPTS') or '')
        extra_oeconf = oe.utils.squashspaces(rd.getVar('EXTRA_OECONF') or '')
        extra_oecmake = oe.utils.squashspaces(rd.getVar('EXTRA_OECMAKE') or '')
        do_configure = rd.getVar('do_configure') or ''
        do_configure_noexpand = rd.getVar('do_configure', False) or ''
        packageconfig = rd.getVarFlags('PACKAGECONFIG') or []
        autotools = bb.data.inherits_class('autotools', rd) and ('oe_runconf' in do_configure or 'autotools_do_configure' in do_configure)
        cmake = bb.data.inherits_class('cmake', rd) and ('cmake_do_configure' in do_configure)
        cmake_do_configure = rd.getVar('cmake_do_configure')
        pn = rd.getVar('PN')
    finally:
        tinfoil.shutdown()

    if 'doc' in packageconfig:
        # Drop the 'doc' flag entry (not an option to report)
        del packageconfig['doc']

    if autotools and not os.path.exists(configurescript):
        # Best-effort: generate the configure script so we can run --help
        logger.info('Running do_configure to generate configure script')
        try:
            stdout, _ = exec_build_env_command(config.init_path, basepath,
                                               'bitbake -c configure %s' % args.recipename,
                                               stderr=subprocess.STDOUT)
        except bb.process.ExecutionError:
            pass

    if confdisabled or do_configure.strip() in ('', ':'):
        raise DevtoolError("do_configure task has been disabled for this recipe")
    elif args.no_pager and not os.path.exists(configurescript):
        raise DevtoolError("No configure script found and no other information to display")
    else:
        # Build up a description of how configuration options are passed
        configopttext = ''
        if autotools and configureopts:
            configopttext = '''
Arguments currently passed to the configure script:

%s

Some of those are fixed.''' % (configureopts + ' ' + extra_oeconf)
            if extra_oeconf:
                configopttext += '''

The ones that are specified through EXTRA_OECONF (which you can change or add to easily):

%s''' % extra_oeconf
        elif cmake:
            # Extract the (possibly line-continued) cmake invocation out of
            # the cmake_do_configure function body
            in_cmake = False
            cmake_cmd = ''
            for line in cmake_do_configure.splitlines():
                if in_cmake:
                    cmake_cmd = cmake_cmd + ' ' + line.strip().rstrip('\\')
                    if not line.endswith('\\'):
                        break
                if line.lstrip().startswith('cmake '):
                    cmake_cmd = line.strip().rstrip('\\')
                    if line.endswith('\\'):
                        in_cmake = True
                    else:
                        break
            if cmake_cmd:
                configopttext = '''
The current cmake command line:

%s

Arguments specified through EXTRA_OECMAKE (which you can change or add to easily)

%s''' % (oe.utils.squashspaces(cmake_cmd), extra_oecmake)
            else:
                configopttext = '''
The current implementation of cmake_do_configure:

cmake_do_configure() {
%s
}

Arguments specified through EXTRA_OECMAKE (which you can change or add to easily)

%s''' % (cmake_do_configure.rstrip(), extra_oecmake)
        elif do_configure:
            configopttext = '''
The current implementation of do_configure:

do_configure() {
%s
}''' % do_configure.rstrip()
            if '${EXTRA_OECONF}' in do_configure_noexpand:
                configopttext += '''

Arguments specified through EXTRA_OECONF (which you can change or add to easily):

%s''' % extra_oeconf

        if packageconfig:
            configopttext += '''

Some of these options may be controlled through PACKAGECONFIG; for more details please see the recipe.'''

        # Choose the help argument to pass to cmake / the configure script
        if args.arg:
            helpargs = ' '.join(args.arg)
        elif cmake:
            helpargs = '-LH'
        else:
            helpargs = '--help'

        msg = '''configure information for %s
------------------------------------------
%s''' % (pn, configopttext)

        if cmake:
            msg += '''

The cmake %s output for %s follows. After "-- Cache values" you should see a list of variables you can add to EXTRA_OECMAKE (prefixed with -D and suffixed with = followed by the desired value, without any spaces).
------------------------------------------''' % (helpargs, pn)
        elif os.path.exists(configurescript):
            msg += '''

The ./configure %s output for %s follows.
------------------------------------------''' % (helpargs, pn)

        olddir = os.getcwd()
        # NOTE(review): tmppath appears unused apart from its removal in the
        # finally block below
        tmppath = tempfile.mkdtemp()
        # Write the header to a temp file so it can be cat'd ahead of the
        # configure/cmake help output in one shell pipeline
        with tempfile.NamedTemporaryFile('w', delete=False) as tf:
            if not args.no_header:
                tf.write(msg + '\n')
            tf.close()
            try:
                try:
                    cmd = 'cat %s' % tf.name
                    if cmake:
                        cmd += '; cmake %s %s 2>&1' % (helpargs, s)
                        os.chdir(b)
                    elif os.path.exists(configurescript):
                        cmd += '; %s %s' % (configurescript, helpargs)
                    if sys.stdout.isatty() and not args.no_pager:
                        # Pipe through the user's pager when interactive
                        pager = os.environ.get('PAGER', 'less')
                        cmd = '(%s) | %s' % (cmd, pager)
                    subprocess.check_call(cmd, shell=True)
                except subprocess.CalledProcessError as e:
                    return e.returncode
            finally:
                os.chdir(olddir)
                shutil.rmtree(tmppath)
                os.remove(tf.name)
def add(args, config, basepath, workspace):
    """Entry point for the devtool 'add' subcommand.

    Creates a workspace recipe (via recipetool) for the given source tree,
    optionally fetching the source first, then writes the externalsrc
    bbappend so builds use the source tree in place.

    Returns 0 on success, 1 if the generated recipe could not be parsed;
    raises DevtoolError on invalid arguments or recipetool failure.
    """
    import bb
    import oe.recipeutils

    if args.recipename in workspace:
        raise DevtoolError("recipe %s is already in your workspace" %
                           args.recipename)
    reason = oe.recipeutils.validate_pn(args.recipename)
    if reason:
        raise DevtoolError(reason)
    # FIXME this ought to be in validate_pn but we're using that in other contexts
    if '/' in args.recipename:
        raise DevtoolError('"/" is not a valid character in recipe names')

    # Validate the source tree path against the fetch mode
    srctree = os.path.abspath(args.srctree)
    if os.path.exists(srctree):
        if args.fetch:
            if not os.path.isdir(srctree):
                raise DevtoolError("Cannot fetch into source tree path %s as "
                                   "it exists and is not a directory" %
                                   srctree)
            elif os.listdir(srctree):
                raise DevtoolError("Cannot fetch into source tree path %s as "
                                   "it already exists and is non-empty" %
                                   srctree)
    elif not args.fetch:
        raise DevtoolError("Specified source tree %s could not be found" %
                           srctree)

    appendpath = os.path.join(config.workspace_path, 'appends')
    if not os.path.exists(appendpath):
        os.makedirs(appendpath)

    recipedir = os.path.join(config.workspace_path, 'recipes', args.recipename)
    bb.utils.mkdirhier(recipedir)

    # Derive the recipe file version suffix from --version or the fetch URI
    # scheme (git/svn/hg SRCREV-style versioning)
    rfv = None
    if args.version:
        if '_' in args.version or ' ' in args.version:
            raise DevtoolError('Invalid version string "%s"' % args.version)
        rfv = args.version
    if args.fetch:
        if args.fetch.startswith('git://'):
            rfv = 'git'
        elif args.fetch.startswith('svn://'):
            rfv = 'svn'
        elif args.fetch.startswith('hg://'):
            rfv = 'hg'
    if rfv:
        bp = "%s_%s" % (args.recipename, rfv)
    else:
        bp = args.recipename
    recipefile = os.path.join(recipedir, "%s.bb" % bp)

    # Only auto-upgrade colour when the user asked for 'auto'
    if args.color == 'auto' and sys.stdout.isatty():
        color = 'always'
    else:
        color = args.color

    # Assemble the recipetool command line
    extracmdopts = ''
    if args.fetch:
        source = args.fetch
        extracmdopts = '-x %s' % srctree
    else:
        source = srctree
    if args.version:
        extracmdopts += ' -V %s' % args.version
    if args.binary:
        extracmdopts += ' -b'
    try:
        stdout, _ = exec_build_env_command(config.init_path, basepath,
                                           'recipetool --color=%s create -o %s "%s" %s'
                                           % (color, recipefile, source, extracmdopts))
    except bb.process.ExecutionError as e:
        raise DevtoolError('Command \'%s\' failed:\n%s' % (e.command, e.stdout))

    _add_md5(config, args.recipename, recipefile)

    if args.fetch and not args.no_git:
        setup_git_repo(srctree, args.version, 'devtool')

    initial_rev = None
    if os.path.exists(os.path.join(srctree, '.git')):
        (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
        initial_rev = stdout.rstrip()

    tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
    # BUGFIX: previously tinfoil.shutdown() was only reached at the very end
    # of the function, so it was skipped on the early 'return 1' path and on
    # any exception raised below; ensure it always runs
    try:
        rd = oe.recipeutils.parse_recipe(recipefile, None, tinfoil.config_data)
        if not rd:
            return 1

        appendfile = os.path.join(appendpath, '%s.bbappend' % bp)
        with open(appendfile, 'w') as f:
            f.write('inherit externalsrc\n')
            f.write('EXTERNALSRC = "%s"\n' % srctree)
            b_is_s = use_external_build(args.same_dir, args.no_same_dir, rd)
            if b_is_s:
                f.write('EXTERNALSRC_BUILD = "%s"\n' % srctree)
            if initial_rev:
                f.write('\n# initial_rev: %s\n' % initial_rev)
            if args.binary:
                # Binary recipes: strip devtool's git metadata out of ${D}
                f.write('do_install_append() {\n')
                f.write('    rm -rf ${D}/.git\n')
                f.write('    rm -f ${D}/singletask.lock\n')
                f.write('}\n')
    finally:
        tinfoil.shutdown()

    _add_md5(config, args.recipename, appendfile)

    logger.info('Recipe %s has been automatically created; further editing may be required to make it fully functional' % recipefile)
    return 0
def sdk_update(args, config, basepath, workspace):
    """Entry point for the devtool 'sdk-update' subcommand.

    Updates an extensible SDK in place from either a local SDK directory
    or a remote update server (any updateserver containing ':' is treated
    as remote). Returns 0 on success or nothing to do, non-zero on error.
    """
    # Fetch locked-sigs.inc file from remote/local destination
    updateserver = args.updateserver
    if not updateserver:
        updateserver = config.get('SDK', 'updateserver', '')
    logger.debug("updateserver: %s" % updateserver)

    # Make sure we are using sdk-update from within SDK
    logger.debug("basepath = %s" % basepath)
    old_locked_sig_file_path = os.path.join(basepath, 'conf/locked-sigs.inc')
    if not os.path.exists(old_locked_sig_file_path):
        logger.error("Not using devtool's sdk-update command from within an extensible SDK. Please specify correct basepath via --basepath option")
        return -1
    else:
        logger.debug("Found conf/locked-sigs.inc in %s" % basepath)

    # A ':' (e.g. http://...) marks the update source as remote
    if ':' in updateserver:
        is_remote = True
    else:
        is_remote = False

    layers_dir = os.path.join(basepath, 'layers')
    conf_dir = os.path.join(basepath, 'conf')

    # Grab variable values
    tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
    try:
        stamps_dir = tinfoil.config_data.getVar('STAMPS_DIR', True)
        sstate_mirrors = tinfoil.config_data.getVar('SSTATE_MIRRORS', True)
        site_conf_version = tinfoil.config_data.getVar('SITE_CONF_VERSION', True)
    finally:
        tinfoil.shutdown()

    if not is_remote:
        # devtool sdk-update /local/path/to/latest/sdk
        new_locked_sig_file_path = os.path.join(updateserver, 'conf/locked-sigs.inc')
        if not os.path.exists(new_locked_sig_file_path):
            logger.error("%s doesn't exist or is not an extensible SDK" % updateserver)
            return -1
        else:
            logger.debug("Found conf/locked-sigs.inc in %s" % updateserver)
        update_dict = generate_update_dict(new_locked_sig_file_path, old_locked_sig_file_path)
        logger.debug("update_dict = %s" % update_dict)
        # BUGFIX: this previously referenced an undefined 'newsdk_path'
        # variable (NameError at runtime); the new SDK path in this branch
        # is 'updateserver'
        sstate_dir = os.path.join(updateserver, 'sstate-cache')
        if not os.path.exists(sstate_dir):
            logger.error("sstate-cache directory not found under %s" % updateserver)
            return 1
        sstate_objects = get_sstate_objects(update_dict, sstate_dir)
        logger.debug("sstate_objects = %s" % sstate_objects)
        if len(sstate_objects) == 0:
            logger.info("No need to update.")
            return 0
        logger.info("Installing sstate objects into %s", basepath)
        install_sstate_objects(sstate_objects, updateserver.rstrip('/'), basepath)

        logger.info("Updating configuration files")
        new_conf_dir = os.path.join(updateserver, 'conf')
        shutil.rmtree(conf_dir)
        shutil.copytree(new_conf_dir, conf_dir)

        logger.info("Updating layers")
        new_layers_dir = os.path.join(updateserver, 'layers')
        shutil.rmtree(layers_dir)
        ret = subprocess.call("cp -a %s %s" % (new_layers_dir, layers_dir), shell=True)
        if ret != 0:
            logger.error("Copying %s to %s failed" % (new_layers_dir, layers_dir))
            return ret
    else:
        # devtool sdk-update http://myhost/sdk
        # Stage downloads in a temp dir; always cleaned up in the finally
        tmpsdk_dir = tempfile.mkdtemp()
        try:
            os.makedirs(os.path.join(tmpsdk_dir, 'conf'))
            # Fetch manifest from server
            tmpmanifest = os.path.join(tmpsdk_dir, 'conf', 'sdk-conf-manifest')
            # NOTE(review): the wget return code is not checked here —
            # presumably check_manifest() copes with a missing manifest;
            # verify
            ret = subprocess.call("wget -q -O %s %s/conf/sdk-conf-manifest" % (tmpmanifest, updateserver), shell=True)
            changedfiles = check_manifest(tmpmanifest, basepath)
            if not changedfiles:
                logger.info("Already up-to-date")
                return 0
            # Update metadata
            logger.debug("Updating metadata via git ...")
            # Try using 'git pull', if failed, use 'git clone'
            if os.path.exists(os.path.join(basepath, 'layers/.git')):
                ret = subprocess.call("git pull %s/layers/.git" % updateserver, shell=True, cwd=layers_dir)
            else:
                ret = -1
            if ret != 0:
                ret = subprocess.call("git clone %s/layers/.git" % updateserver, shell=True, cwd=tmpsdk_dir)
                if ret != 0:
                    logger.error("Updating metadata via git failed")
                    return ret
            logger.debug("Updating conf files ...")
            for changedfile in changedfiles:
                ret = subprocess.call("wget -q -O %s %s/%s" % (changedfile, updateserver, changedfile), shell=True, cwd=tmpsdk_dir)
                if ret != 0:
                    logger.error("Updating %s failed" % changedfile)
                    return ret
            # Ok, all is well at this point - move everything over
            tmplayers_dir = os.path.join(tmpsdk_dir, 'layers')
            if os.path.exists(tmplayers_dir):
                shutil.rmtree(layers_dir)
                shutil.move(tmplayers_dir, layers_dir)
            for changedfile in changedfiles:
                destfile = os.path.join(basepath, changedfile)
                os.remove(destfile)
                shutil.move(os.path.join(tmpsdk_dir, changedfile), destfile)
            os.remove(os.path.join(conf_dir, 'sdk-conf-manifest'))
            shutil.move(tmpmanifest, conf_dir)
            if not sstate_mirrors:
                # First update on this SDK: point sstate at the update server
                with open(os.path.join(conf_dir, 'site.conf'), 'a') as f:
                    f.write('SCONF_VERSION = "%s"\n' % site_conf_version)
                    f.write('SSTATE_MIRRORS_append = " file://.* %s/sstate-cache/PATH \\n "\n' % updateserver)
        finally:
            shutil.rmtree(tmpsdk_dir)

    if not args.skip_prepare:
        # Find all potentially updateable tasks
        sdk_update_targets = []
        tasks = ['do_populate_sysroot', 'do_packagedata']
        for root, _, files in os.walk(stamps_dir):
            for fn in files:
                if not '.sigdata.' in fn:
                    for task in tasks:
                        if '.%s.' % task in fn or '.%s_setscene.' % task in fn:
                            sdk_update_targets.append('%s:%s' % (os.path.basename(root), task))
        # Run bitbake command for the whole SDK
        logger.info("Preparing build system... (This may take some time.)")
        try:
            exec_build_env_command(config.init_path, basepath, 'bitbake --setscene-only %s' % ' '.join(sdk_update_targets), stderr=subprocess.STDOUT)
            # Dry-run to verify everything was restored from sstate: any
            # remaining "Running task" lines mean preparation failed
            output, _ = exec_build_env_command(config.init_path, basepath, 'bitbake -n %s' % ' '.join(sdk_update_targets), stderr=subprocess.STDOUT)
            runlines = []
            for line in output.splitlines():
                if 'Running task ' in line:
                    runlines.append(line)
            if runlines:
                logger.error('Unexecuted tasks found in preparation log:\n %s' % '\n '.join(runlines))
                return -1
        except bb.process.ExecutionError as e:
            logger.error('Preparation failed:\n%s' % e.stdout)
            return -1
    return 0
def add(args, config, basepath, workspace):
    """Entry point for the devtool 'add' subcommand.

    Creates a workspace recipe (via recipetool) for the given source tree,
    optionally fetching the source first, then writes the externalsrc
    bbappend so builds use the source tree in place.

    Returns 0 on success, non-zero on error.
    """
    import bb
    import oe.recipeutils

    if args.recipename in workspace:
        logger.error("recipe %s is already in your workspace" % args.recipename)
        return -1
    reason = oe.recipeutils.validate_pn(args.recipename)
    if reason:
        logger.error(reason)
        return -1

    # Validate the source tree path against the fetch mode
    srctree = os.path.abspath(args.srctree)
    if os.path.exists(srctree):
        if args.fetch:
            if not os.path.isdir(srctree):
                logger.error("Cannot fetch into source tree path %s as it exists and is not a directory" % srctree)
                return 1
            elif os.listdir(srctree):
                logger.error("Cannot fetch into source tree path %s as it already exists and is non-empty" % srctree)
                return 1
    else:
        if not args.fetch:
            logger.error("Specified source tree %s could not be found" % srctree)
            return 1

    appendpath = os.path.join(config.workspace_path, 'appends')
    if not os.path.exists(appendpath):
        os.makedirs(appendpath)

    recipedir = os.path.join(config.workspace_path, 'recipes', args.recipename)
    bb.utils.mkdirhier(recipedir)

    # Derive the recipe file version suffix from --version or the fetch URI
    # scheme (git/svn/hg SRCREV-style versioning)
    rfv = None
    if args.version:
        if '_' in args.version or ' ' in args.version:
            logger.error('Invalid version string "%s"' % args.version)
            return -1
        rfv = args.version
    if args.fetch:
        if args.fetch.startswith('git://'):
            rfv = 'git'
        elif args.fetch.startswith('svn://'):
            rfv = 'svn'
        elif args.fetch.startswith('hg://'):
            rfv = 'hg'
    if rfv:
        bp = "%s_%s" % (args.recipename, rfv)
    else:
        bp = args.recipename
    recipefile = os.path.join(recipedir, "%s.bb" % bp)

    # BUGFIX: previously any tty forced color='always', silently ignoring an
    # explicit --color=never/always from the user; only auto-upgrade when
    # the user asked for 'auto'
    if args.color == 'auto' and sys.stdout.isatty():
        color = 'always'
    else:
        color = args.color

    # Assemble the recipetool command line
    extracmdopts = ''
    if args.fetch:
        source = args.fetch
        extracmdopts = '-x %s' % srctree
    else:
        source = srctree
    if args.version:
        extracmdopts += ' -V %s' % args.version
    try:
        stdout, _ = exec_build_env_command(config.init_path, basepath,
                                           'recipetool --color=%s create -o %s "%s" %s'
                                           % (color, recipefile, source, extracmdopts))
        logger.info('Recipe %s has been automatically created; further editing may be required to make it fully functional' % recipefile)
    except bb.process.ExecutionError as e:
        logger.error('Command \'%s\' failed:\n%s' % (e.command, e.stdout))
        return 1

    _add_md5(config, args.recipename, recipefile)

    # Record the current HEAD so later updates can diff against it
    initial_rev = None
    if os.path.exists(os.path.join(srctree, '.git')):
        (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
        initial_rev = stdout.rstrip()

    appendfile = os.path.join(appendpath, '%s.bbappend' % bp)
    with open(appendfile, 'w') as f:
        f.write('inherit externalsrc\n')
        f.write('EXTERNALSRC = "%s"\n' % srctree)
        if args.same_dir:
            f.write('EXTERNALSRC_BUILD = "%s"\n' % srctree)
        if initial_rev:
            f.write('\n# initial_rev: %s\n' % initial_rev)

    _add_md5(config, args.recipename, appendfile)
    return 0
def sdk_update(args, config, basepath, workspace):
    """Entry point for the devtool 'sdk-update' subcommand.

    Fetches locked-sigs.inc from a remote or local SDK location and
    updates the current extensible SDK (sstate objects, conf files and
    layers). Returns 0 on success or nothing to do, non-zero on error;
    raises DevtoolError if no update server is configured.
    """
    # Fetch locked-sigs.inc file from remote/local destination
    updateserver = args.updateserver
    if not updateserver:
        updateserver = config.get('SDK', 'updateserver', '')
    if not updateserver:
        raise DevtoolError("Update server not specified in config file, you must specify it on the command line")
    logger.debug("updateserver: %s" % updateserver)

    # Make sure we are using sdk-update from within SDK
    logger.debug("basepath = %s" % basepath)
    old_locked_sig_file_path = os.path.join(basepath, 'conf/locked-sigs.inc')
    if not os.path.exists(old_locked_sig_file_path):
        logger.error("Not using devtool's sdk-update command from within an extensible SDK. Please specify correct basepath via --basepath option")
        return -1
    else:
        logger.debug("Found conf/locked-sigs.inc in %s" % basepath)

    # A ':' (e.g. http://...) marks the update source as remote
    if ':' in updateserver:
        is_remote = True
    else:
        is_remote = False

    if not is_remote:
        # devtool sdk-update /local/path/to/latest/sdk
        new_locked_sig_file_path = os.path.join(updateserver, 'conf/locked-sigs.inc')
        if not os.path.exists(new_locked_sig_file_path):
            logger.error("%s doesn't exist or is not an extensible SDK" % updateserver)
            return -1
        else:
            logger.debug("Found conf/locked-sigs.inc in %s" % updateserver)
        update_dict = generate_update_dict(new_locked_sig_file_path, old_locked_sig_file_path)
        logger.debug("update_dict = %s" % update_dict)
        # BUGFIX: this previously referenced an undefined 'newsdk_path'
        # variable (NameError at runtime); the new SDK path in this branch
        # is 'updateserver'
        sstate_dir = os.path.join(updateserver, 'sstate-cache')
        if not os.path.exists(sstate_dir):
            logger.error("sstate-cache directory not found under %s" % updateserver)
            return 1
        sstate_objects = get_sstate_objects(update_dict, sstate_dir)
        logger.debug("sstate_objects = %s" % sstate_objects)
        if len(sstate_objects) == 0:
            logger.info("No need to update.")
            return 0
        logger.info("Installing sstate objects into %s", basepath)
        install_sstate_objects(sstate_objects, updateserver.rstrip('/'), basepath)

        logger.info("Updating configuration files")
        new_conf_dir = os.path.join(updateserver, 'conf')
        old_conf_dir = os.path.join(basepath, 'conf')
        shutil.rmtree(old_conf_dir)
        shutil.copytree(new_conf_dir, old_conf_dir)

        logger.info("Updating layers")
        new_layers_dir = os.path.join(updateserver, 'layers')
        old_layers_dir = os.path.join(basepath, 'layers')
        shutil.rmtree(old_layers_dir)
        ret = subprocess.call("cp -a %s %s" % (new_layers_dir, old_layers_dir), shell=True)
        if ret != 0:
            logger.error("Copying %s to %s failed" % (new_layers_dir, old_layers_dir))
            return ret
    else:
        # devtool sdk-update http://myhost/sdk
        # BUGFIX: use a private mkdtemp() directory instead of the fixed,
        # world-predictable /tmp/sdk-ext path (symlink/clobber hazard on
        # multi-user systems)
        tmpsdk_dir = tempfile.mkdtemp()
        os.makedirs(os.path.join(tmpsdk_dir, 'conf'))
        # Fetch locked-sigs.inc from update server
        ret = subprocess.call(
            "wget -q -O - %s/conf/locked-sigs.inc > %s/locked-sigs.inc"
            % (updateserver, os.path.join(tmpsdk_dir, 'conf')), shell=True)
        if ret != 0:
            logger.error(
                "Fetching conf/locked-sigs.inc from %s to %s/locked-sigs.inc failed"
                % (updateserver, os.path.join(tmpsdk_dir, 'conf')))
            return ret
        else:
            logger.info(
                "Fetching conf/locked-sigs.inc from %s to %s/locked-sigs.inc succeeded"
                % (updateserver, os.path.join(tmpsdk_dir, 'conf')))
        new_locked_sig_file_path = os.path.join(tmpsdk_dir, 'conf/locked-sigs.inc')
        update_dict = generate_update_dict(new_locked_sig_file_path, old_locked_sig_file_path)
        logger.debug("update_dict = %s" % update_dict)
        if len(update_dict) == 0:
            logger.info("No need to update.")
            return 0
        # Update metadata
        logger.debug("Updating meta data via git ...")
        # Try using 'git pull', if failed, use 'git clone'
        # NOTE(review): these shell commands operate on 'layers'/'conf'
        # relative to the process's current working directory — presumably
        # the SDK root; verify the caller chdirs there first
        if os.path.exists(os.path.join(basepath, 'layers/.git')):
            ret = subprocess.call("cd layers && git pull %s/layers/.git" % updateserver, shell=True)
        else:
            ret = -1
        if ret != 0:
            ret = subprocess.call("rm -rf layers && git clone %s/layers/.git" % updateserver, shell=True)
            if ret != 0:
                logger.error("Updating meta data via git failed")
                return ret
        logger.debug("Updating conf files ...")
        conf_files = ['local.conf', 'bblayers.conf', 'devtool.conf', 'locked-sigs.inc']
        for conf in conf_files:
            ret = subprocess.call("wget -q -O - %s/conf/%s > conf/%s" % (updateserver, conf, conf), shell=True)
            if ret != 0:
                logger.error("Update %s failed" % conf)
                return ret
        # Point sstate at the update server's cache
        with open(os.path.join(basepath, 'conf/local.conf'), 'a') as f:
            f.write('SSTATE_MIRRORS_append = " file://.* %s/sstate-cache/PATH \\n "\n' % updateserver)

    if not args.skip_prepare:
        # Run bitbake command for the whole SDK
        sdk_targets = config.get('SDK', 'sdk_targets')
        logger.info("Preparing build system... (This may take some time.)")
        try:
            exec_build_env_command(config.init_path, basepath, 'bitbake %s --setscene-only' % sdk_targets)
        # BUGFIX: was a bare 'except:' which swallowed every exception
        # (including KeyboardInterrupt); exec_build_env_command signals
        # failure with bb.process.ExecutionError
        except bb.process.ExecutionError:
            logger.error('bitbake %s failed' % sdk_targets)
            return -1
    return 0
def build_image_task(config, basepath, workspace, image, add_packages=None, task=None, extra_append=None):
    """Build an image (or run a specific task on it), optionally injecting
    workspace/additional packages via a temporary bbappend.

    Returns a tuple (exitcode, outputdir) where outputdir is the deploy
    directory for the built artefacts (SDK_DEPLOY for populate_sdk* tasks,
    DEPLOY_DIR_IMAGE otherwise) or None on failure.
    """
    # remove <image>.bbappend to make sure setup_tinfoil doesn't
    # break because of it
    target_basename = config.get('SDK', 'target_basename', '')
    if target_basename:
        appendfile = os.path.join(config.workspace_path, 'appends', '%s.bbappend' % target_basename)
        try:
            os.unlink(appendfile)
        except OSError as exc:
            # Only a missing file is acceptable here; re-raise anything else
            if exc.errno != errno.ENOENT:
                raise
    tinfoil = setup_tinfoil(basepath=basepath)
    try:
        rd = parse_recipe(config, tinfoil, image, True)
        if not rd:
            # Error already shown
            return (1, None)
        if not bb.data.inherits_class('image', rd):
            raise TargetNotImageError()
        # Get the actual filename used and strip the .bb and full path
        target_basename = rd.getVar('FILE')
        target_basename = os.path.splitext(os.path.basename(target_basename))[0]
        # Remember the basename so the bbappend can be cleaned up on the
        # next invocation (see unlink above)
        config.set('SDK', 'target_basename', target_basename)
        config.write()
        appendfile = os.path.join(config.workspace_path, 'appends', '%s.bbappend' % target_basename)
        outputdir = None
        try:
            if workspace or add_packages:
                if add_packages:
                    packages = add_packages
                else:
                    packages = _get_packages(tinfoil, workspace, config)
            else:
                packages = None
            if not task:
                if not packages and not add_packages and workspace:
                    logger.warning('No recipes in workspace, building image %s unmodified', image)
                elif not packages:
                    logger.warning('No packages to add, building image %s unmodified', image)
            if packages or extra_append:
                # Write a transient bbappend that injects the extra packages
                # and/or caller-supplied lines into the image recipe
                bb.utils.mkdirhier(os.path.dirname(appendfile))
                with open(appendfile, 'w') as afile:
                    if packages:
                        # include packages from workspace recipes into the image
                        afile.write('IMAGE_INSTALL_append = " %s"\n' % ' '.join(packages))
                        if not task:
                            logger.info('Building image %s with the following '
                                        'additional packages: %s', image, ' '.join(packages))
                    if extra_append:
                        for line in extra_append:
                            afile.write('%s\n' % line)
            if task in ['populate_sdk', 'populate_sdk_ext']:
                outputdir = rd.getVar('SDK_DEPLOY')
            else:
                outputdir = rd.getVar('DEPLOY_DIR_IMAGE')
            # Hand the tinfoil object off to a temporary name and clear the
            # outer variable before shutting it down, so the outer finally
            # below does not attempt a second shutdown
            tmp_tinfoil = tinfoil
            tinfoil = None
            tmp_tinfoil.shutdown()
            options = ''
            if task:
                options += '-c %s' % task
            # run bitbake to build image (or specified task)
            try:
                exec_build_env_command(config.init_path, basepath, 'bitbake %s %s' % (options, image), watch=True)
            except ExecutionError as err:
                return (err.exitcode, None)
        finally:
            # Always remove the transient bbappend, built or not
            if os.path.isfile(appendfile):
                os.unlink(appendfile)
    finally:
        # Only shut down if the inner hand-off above did not already do so
        if tinfoil:
            tinfoil.shutdown()
    return (0, outputdir)
def sdk_update(args, config, basepath, workspace):
    """Entry point for the devtool 'sdk-update' subcommand.

    Updates an extensible SDK installation from either a local directory
    (copy sstate objects, conf and layers) or a remote HTTP server (fetch
    locked-sigs.inc and conf files, update layers via git), then optionally
    re-runs bitbake --setscene-only to prepare the build system.

    Returns 0 on success, a non-zero exit code on failure.
    """
    # Fetch locked-sigs.inc file from remote/local destination
    updateserver = args.updateserver
    if not updateserver:
        updateserver = config.get('SDK', 'updateserver', '')
    if not updateserver:
        raise DevtoolError("Update server not specified in config file, you must specify it on the command line")
    logger.debug("updateserver: %s" % updateserver)

    # Make sure we are using sdk-update from within SDK
    logger.debug("basepath = %s" % basepath)
    old_locked_sig_file_path = os.path.join(basepath, 'conf/locked-sigs.inc')
    if not os.path.exists(old_locked_sig_file_path):
        logger.error("Not using devtool's sdk-update command from within an extensible SDK. Please specify correct basepath via --basepath option")
        return -1
    else:
        logger.debug("Found conf/locked-sigs.inc in %s" % basepath)

    # A ':' in the server string indicates a URL scheme (e.g. http://),
    # i.e. a remote update source rather than a local path
    if ':' in updateserver:
        is_remote = True
    else:
        is_remote = False

    if not is_remote:
        # devtool sdk-update /local/path/to/latest/sdk
        new_locked_sig_file_path = os.path.join(updateserver, 'conf/locked-sigs.inc')
        if not os.path.exists(new_locked_sig_file_path):
            logger.error("%s doesn't exist or is not an extensible SDK" % updateserver)
            return -1
        else:
            logger.debug("Found conf/locked-sigs.inc in %s" % updateserver)
        update_dict = generate_update_dict(new_locked_sig_file_path, old_locked_sig_file_path)
        logger.debug("update_dict = %s" % update_dict)
        # FIX: newsdk_path was previously referenced without being assigned,
        # causing a NameError on this code path; the new SDK is simply the
        # local path given as the update server
        newsdk_path = updateserver
        sstate_dir = os.path.join(newsdk_path, 'sstate-cache')
        if not os.path.exists(sstate_dir):
            logger.error("sstate-cache directory not found under %s" % newsdk_path)
            return 1
        sstate_objects = get_sstate_objects(update_dict, sstate_dir)
        logger.debug("sstate_objects = %s" % sstate_objects)
        if len(sstate_objects) == 0:
            logger.info("No need to update.")
            return 0
        logger.info("Installing sstate objects into %s", basepath)
        install_sstate_objects(sstate_objects, updateserver.rstrip('/'), basepath)

        logger.info("Updating configuration files")
        new_conf_dir = os.path.join(updateserver, 'conf')
        old_conf_dir = os.path.join(basepath, 'conf')
        shutil.rmtree(old_conf_dir)
        shutil.copytree(new_conf_dir, old_conf_dir)

        logger.info("Updating layers")
        new_layers_dir = os.path.join(updateserver, 'layers')
        old_layers_dir = os.path.join(basepath, 'layers')
        shutil.rmtree(old_layers_dir)
        # cp -a preserves permissions/symlinks, which shutil.copytree of this
        # era did not handle fully
        ret = subprocess.call("cp -a %s %s" % (new_layers_dir, old_layers_dir), shell=True)
        if ret != 0:
            logger.error("Copying %s to %s failed" % (new_layers_dir, old_layers_dir))
            return ret
    else:
        # devtool sdk-update http://myhost/sdk
        tmpsdk_dir = '/tmp/sdk-ext'
        if os.path.exists(tmpsdk_dir):
            shutil.rmtree(tmpsdk_dir)
        os.makedirs(tmpsdk_dir)
        os.makedirs(os.path.join(tmpsdk_dir, 'conf'))
        # Fetch locked-sigs.inc from update server
        ret = subprocess.call("wget -q -O - %s/conf/locked-sigs.inc > %s/locked-sigs.inc" % (updateserver, os.path.join(tmpsdk_dir, 'conf')), shell=True)
        if ret != 0:
            logger.error("Fetching conf/locked-sigs.inc from %s to %s/locked-sigs.inc failed" % (updateserver, os.path.join(tmpsdk_dir, 'conf')))
            return ret
        else:
            logger.info("Fetching conf/locked-sigs.inc from %s to %s/locked-sigs.inc succeeded" % (updateserver, os.path.join(tmpsdk_dir, 'conf')))
        new_locked_sig_file_path = os.path.join(tmpsdk_dir, 'conf/locked-sigs.inc')
        update_dict = generate_update_dict(new_locked_sig_file_path, old_locked_sig_file_path)
        logger.debug("update_dict = %s" % update_dict)
        if len(update_dict) == 0:
            logger.info("No need to update.")
            return 0
        # Update metadata
        logger.debug("Updating meta data via git ...")
        # Try using 'git pull', if failed, use 'git clone'
        if os.path.exists(os.path.join(basepath, 'layers/.git')):
            ret = subprocess.call("cd layers && git pull %s/layers/.git" % updateserver, shell=True)
        else:
            ret = -1
        if ret != 0:
            ret = subprocess.call("rm -rf layers && git clone %s/layers/.git" % updateserver, shell=True)
        if ret != 0:
            logger.error("Updating meta data via git failed")
            return ret
        logger.debug("Updating conf files ...")
        conf_files = ['local.conf', 'bblayers.conf', 'devtool.conf', 'locked-sigs.inc']
        for conf in conf_files:
            ret = subprocess.call("wget -q -O - %s/conf/%s > conf/%s" % (updateserver, conf, conf), shell=True)
            if ret != 0:
                logger.error("Update %s failed" % conf)
                return ret

    # Point the SDK's sstate mirror at the update server so prepared tasks
    # can be fetched from it
    with open(os.path.join(basepath, 'conf/local.conf'), 'a') as f:
        f.write('SSTATE_MIRRORS_append = " file://.* %s/sstate-cache/PATH \\n "\n' % updateserver)

    if not args.skip_prepare:
        # Run bitbake command for the whole SDK
        sdk_targets = config.get('SDK', 'sdk_targets')
        logger.info("Preparing build system... (This may take some time.)")
        try:
            exec_build_env_command(config.init_path, basepath, 'bitbake %s --setscene-only' % sdk_targets)
        except Exception:
            # Deliberately broad: any preparation failure is reported the
            # same way; the bitbake output itself carries the detail
            logger.error('bitbake %s failed' % sdk_targets)
            return -1
    return 0
def build_image(args, config, basepath, workspace):
    """Entry point for the devtool 'build-image' subcommand."""
    image = args.imagename
    auto_image = False
    if not image:
        # No image named on the command line: fall back to the first
        # configured SDK target, if any
        sdk_targets = config.get('SDK', 'sdk_targets', '').split()
        if sdk_targets:
            image = sdk_targets[0]
            auto_image = True
    if not image:
        raise DevtoolError('Unable to determine image to build, please specify one')

    appendfile = os.path.join(config.workspace_path, 'appends', '%s.bbappend' % image)
    # remove <image>.bbappend to make sure setup_tinfoil doesn't
    # break because of it
    if os.path.isfile(appendfile):
        os.unlink(appendfile)

    tinfoil = setup_tinfoil(basepath=basepath)
    rd = parse_recipe(config, tinfoil, image, True)
    if not rd:
        # Error already shown
        return 1
    if not bb.data.inherits_class('image', rd):
        if auto_image:
            # The auto-selected sdk_target is not an image; don't blame the
            # user for a name they never typed
            raise DevtoolError('Unable to determine image to build, please specify one')
        else:
            raise DevtoolError('Specified recipe %s is not an image recipe' % image)

    try:
        if workspace:
            packages = _get_packages(tinfoil, workspace, config)
            if packages:
                # Write a transient bbappend injecting the workspace
                # packages into the image
                with open(appendfile, 'w') as afile:
                    # include packages from workspace recipes into the image
                    afile.write('IMAGE_INSTALL_append = " %s"\n' % ' '.join(packages))
                    logger.info('Building image %s with the following '
                                'additional packages: %s', image, ' '.join(packages))
            else:
                logger.warning('No packages to add, building image %s unmodified', image)
        else:
            logger.warning('No recipes in workspace, building image %s unmodified', image)
        deploy_dir_image = tinfoil.config_data.getVar('DEPLOY_DIR_IMAGE', True)
        # Shut tinfoil down before spawning bitbake, which needs the
        # server/lock for itself
        tinfoil.shutdown()
        # run bitbake to build image
        try:
            exec_build_env_command(config.init_path, basepath, 'bitbake %s' % image, watch=True)
        except ExecutionError as err:
            return err.exitcode
    finally:
        # Always remove the transient bbappend, built or not
        if os.path.isfile(appendfile):
            os.unlink(appendfile)
    # NOTE(review): no explicit return here, so success returns None rather
    # than 0 — confirm callers treat None as success
    logger.info('Successfully built %s. You can find output files in %s' % (image, deploy_dir_image))
def configure_help(args, config, basepath, workspace):
    """Entry point for the devtool 'configure-help' subcommand.

    Gathers configure-related variables from the recipe, then shows the
    recipe's configure arguments together with the output of
    ./configure --help (autotools) or cmake -LH (cmake), optionally piped
    through a pager.

    Returns 1 if the recipe could not be parsed, the subprocess return code
    on pager/help-command failure; raises DevtoolError when do_configure is
    disabled or there is nothing to display.
    """
    import oe.utils
    check_workspace_recipe(workspace, args.recipename)
    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False)
        if not rd:
            return 1
        b = rd.getVar('B', True)
        s = rd.getVar('S', True)
        configurescript = os.path.join(s, 'configure')
        # do_configure is considered disabled if flagged noexec or absent
        # from the task list entirely
        confdisabled = 'noexec' in rd.getVarFlags('do_configure') or 'do_configure' not in (rd.getVar('__BBTASKS', False) or [])
        configureopts = oe.utils.squashspaces(rd.getVar('CONFIGUREOPTS', True) or '')
        extra_oeconf = oe.utils.squashspaces(rd.getVar('EXTRA_OECONF', True) or '')
        extra_oecmake = oe.utils.squashspaces(rd.getVar('EXTRA_OECMAKE', True) or '')
        do_configure = rd.getVar('do_configure', True) or ''
        do_configure_noexpand = rd.getVar('do_configure', False) or ''
        packageconfig = rd.getVarFlags('PACKAGECONFIG') or []
        # Only treat as autotools/cmake when do_configure actually routes
        # through the standard class helpers
        autotools = bb.data.inherits_class('autotools', rd) and ('oe_runconf' in do_configure or 'autotools_do_configure' in do_configure)
        cmake = bb.data.inherits_class('cmake', rd) and ('cmake_do_configure' in do_configure)
        cmake_do_configure = rd.getVar('cmake_do_configure', True)
        pn = rd.getVar('PN', True)
    finally:
        tinfoil.shutdown()

    # 'doc' is a varflag of PACKAGECONFIG itself, not a real option
    if 'doc' in packageconfig:
        del packageconfig['doc']

    if autotools and not os.path.exists(configurescript):
        logger.info('Running do_configure to generate configure script')
        try:
            # FIX: previously formatted the command with the (not yet
            # defined) 'msg' variable and passed args.recipename as a stray
            # positional argument, raising NameError on this path
            stdout, _ = exec_build_env_command(config.init_path, basepath,
                                               'bitbake -c configure %s' % args.recipename,
                                               stderr=subprocess.STDOUT)
        except bb.process.ExecutionError:
            # Best-effort: fall through and display whatever we can
            pass

    if confdisabled or do_configure.strip() in ('', ':'):
        raise DevtoolError("do_configure task has been disabled for this recipe")
    elif args.no_pager and not os.path.exists(configurescript):
        raise DevtoolError("No configure script found and no other information to display")
    else:
        configopttext = ''
        if autotools and configureopts:
            configopttext = '''
Arguments currently passed to the configure script:

%s

Some of those are fixed.''' % (configureopts + ' ' + extra_oeconf)
            if extra_oeconf:
                configopttext += '''

The ones that are specified through EXTRA_OECONF (which you can change or add to easily):

%s''' % extra_oeconf

        elif cmake:
            # Pull the actual cmake invocation (possibly spanning
            # backslash-continued lines) out of cmake_do_configure
            in_cmake = False
            cmake_cmd = ''
            for line in cmake_do_configure.splitlines():
                if in_cmake:
                    cmake_cmd = cmake_cmd + ' ' + line.strip().rstrip('\\')
                    if not line.endswith('\\'):
                        break
                if line.lstrip().startswith('cmake '):
                    cmake_cmd = line.strip().rstrip('\\')
                    if line.endswith('\\'):
                        in_cmake = True
                    else:
                        break
            if cmake_cmd:
                configopttext = '''
The current cmake command line:

%s

Arguments specified through EXTRA_OECMAKE (which you can change or add to easily)

%s''' % (oe.utils.squashspaces(cmake_cmd), extra_oecmake)
            else:
                configopttext = '''
The current implementation of cmake_do_configure:

cmake_do_configure() {
%s
}

Arguments specified through EXTRA_OECMAKE (which you can change or add to easily)

%s''' % (cmake_do_configure.rstrip(), extra_oecmake)

        elif do_configure:
            configopttext = '''
The current implementation of do_configure:

do_configure() {
%s
}''' % do_configure.rstrip()
            if '${EXTRA_OECONF}' in do_configure_noexpand:
                configopttext += '''

Arguments specified through EXTRA_OECONF (which you can change or add to easily):
%s''' % extra_oeconf

        if packageconfig:
            configopttext += '''

Some of these options may be controlled through PACKAGECONFIG; for more details please see the recipe.'''

        if args.arg:
            helpargs = ' '.join(args.arg)
        elif cmake:
            helpargs = '-LH'
        else:
            helpargs = '--help'

        msg = '''configure information for %s
------------------------------------------
%s''' % (pn, configopttext)

        if cmake:
            msg += '''

The cmake %s output for %s follows. After "-- Cache values" you should see a list of variables you can add to EXTRA_OECMAKE (prefixed with -D and suffixed with = followed by the desired value, without any spaces).
------------------------------------------''' % (helpargs, pn)
        elif os.path.exists(configurescript):
            msg += '''

The ./configure %s output for %s follows.
------------------------------------------''' % (helpargs, pn)

        olddir = os.getcwd()
        tmppath = tempfile.mkdtemp()
        with tempfile.NamedTemporaryFile('w', delete=False) as tf:
            if not args.no_header:
                tf.write(msg + '\n')
            # Close early so the shell command below can read the file;
            # delete=False keeps it around until the finally clause
            tf.close()
            try:
                try:
                    cmd = 'cat %s' % tf.name
                    if cmake:
                        cmd += '; cmake %s %s 2>&1' % (helpargs, s)
                        os.chdir(b)
                    elif os.path.exists(configurescript):
                        cmd += '; %s %s' % (configurescript, helpargs)
                    if sys.stdout.isatty() and not args.no_pager:
                        pager = os.environ.get('PAGER', 'less')
                        cmd = '(%s) | %s' % (cmd, pager)
                    subprocess.check_call(cmd, shell=True)
                except subprocess.CalledProcessError as e:
                    return e.returncode
            finally:
                os.chdir(olddir)
                shutil.rmtree(tmppath)
                os.remove(tf.name)
def sdk_update(args, config, basepath, workspace):
    """Entry point for the devtool 'sdk-update' subcommand.

    Updates an extensible SDK from a local directory (copy sstate, conf and
    layers) or a remote server (manifest-driven fetch of changed files plus
    git update of the layers), then optionally prepares the build system by
    running bitbake --setscene-only over the updateable tasks.

    Returns 0 on success, non-zero on failure.
    """
    # Fetch locked-sigs.inc file from remote/local destination
    updateserver = args.updateserver
    if not updateserver:
        updateserver = config.get('SDK', 'updateserver', '')
    logger.debug("updateserver: %s" % updateserver)

    # Make sure we are using sdk-update from within SDK
    logger.debug("basepath = %s" % basepath)
    old_locked_sig_file_path = os.path.join(basepath, 'conf/locked-sigs.inc')
    if not os.path.exists(old_locked_sig_file_path):
        logger.error("Not using devtool's sdk-update command from within an extensible SDK. Please specify correct basepath via --basepath option")
        return -1
    else:
        logger.debug("Found conf/locked-sigs.inc in %s" % basepath)

    # A ':' indicates a URL scheme, i.e. a remote update source
    if ':' in updateserver:
        is_remote = True
    else:
        is_remote = False

    layers_dir = os.path.join(basepath, 'layers')
    conf_dir = os.path.join(basepath, 'conf')

    # Grab variable values
    tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
    try:
        stamps_dir = tinfoil.config_data.getVar('STAMPS_DIR', True)
        sstate_mirrors = tinfoil.config_data.getVar('SSTATE_MIRRORS', True)
        site_conf_version = tinfoil.config_data.getVar('SITE_CONF_VERSION', True)
    finally:
        tinfoil.shutdown()

    if not is_remote:
        # devtool sdk-update /local/path/to/latest/sdk
        new_locked_sig_file_path = os.path.join(updateserver, 'conf/locked-sigs.inc')
        if not os.path.exists(new_locked_sig_file_path):
            logger.error("%s doesn't exist or is not an extensible SDK" % updateserver)
            return -1
        else:
            logger.debug("Found conf/locked-sigs.inc in %s" % updateserver)
        update_dict = generate_update_dict(new_locked_sig_file_path, old_locked_sig_file_path)
        logger.debug("update_dict = %s" % update_dict)
        newsdk_path = updateserver
        sstate_dir = os.path.join(newsdk_path, 'sstate-cache')
        if not os.path.exists(sstate_dir):
            logger.error("sstate-cache directory not found under %s" % newsdk_path)
            return 1
        sstate_objects = get_sstate_objects(update_dict, sstate_dir)
        logger.debug("sstate_objects = %s" % sstate_objects)
        if len(sstate_objects) == 0:
            logger.info("No need to update.")
            return 0
        logger.info("Installing sstate objects into %s", basepath)
        install_sstate_objects(sstate_objects, updateserver.rstrip('/'), basepath)

        logger.info("Updating configuration files")
        new_conf_dir = os.path.join(updateserver, 'conf')
        shutil.rmtree(conf_dir)
        shutil.copytree(new_conf_dir, conf_dir)

        logger.info("Updating layers")
        new_layers_dir = os.path.join(updateserver, 'layers')
        shutil.rmtree(layers_dir)
        # cp -a preserves permissions/symlinks across the layer tree
        ret = subprocess.call("cp -a %s %s" % (new_layers_dir, layers_dir), shell=True)
        if ret != 0:
            logger.error("Copying %s to %s failed" % (new_layers_dir, layers_dir))
            return ret
    else:
        # devtool sdk-update http://myhost/sdk
        tmpsdk_dir = tempfile.mkdtemp()
        try:
            os.makedirs(os.path.join(tmpsdk_dir, 'conf'))
            new_locked_sig_file_path = os.path.join(tmpsdk_dir, 'conf', 'locked-sigs.inc')
            # Fetch manifest from server
            tmpmanifest = os.path.join(tmpsdk_dir, 'conf', 'sdk-conf-manifest')
            # NOTE(review): the wget return code is not checked here before
            # the manifest is parsed — confirm check_manifest tolerates a
            # missing/empty file
            ret = subprocess.call("wget -q -O %s %s/conf/sdk-conf-manifest" % (tmpmanifest, updateserver), shell=True)
            changedfiles = check_manifest(tmpmanifest, basepath)
            if not changedfiles:
                logger.info("Already up-to-date")
                return 0
            # Update metadata
            logger.debug("Updating metadata via git ...")
            # Try using 'git pull', if failed, use 'git clone'
            if os.path.exists(os.path.join(basepath, 'layers/.git')):
                ret = subprocess.call("git pull %s/layers/.git" % updateserver, shell=True, cwd=layers_dir)
            else:
                ret = -1
            if ret != 0:
                # Clone into the temp dir first; moved into place further down
                ret = subprocess.call("git clone %s/layers/.git" % updateserver, shell=True, cwd=tmpsdk_dir)
            if ret != 0:
                logger.error("Updating metadata via git failed")
                return ret
            logger.debug("Updating conf files ...")
            for changedfile in changedfiles:
                ret = subprocess.call("wget -q -O %s %s/%s" % (changedfile, updateserver, changedfile), shell=True, cwd=tmpsdk_dir)
                if ret != 0:
                    logger.error("Updating %s failed" % changedfile)
                    return ret
            # Ok, all is well at this point - move everything over
            tmplayers_dir = os.path.join(tmpsdk_dir, 'layers')
            if os.path.exists(tmplayers_dir):
                shutil.rmtree(layers_dir)
                shutil.move(tmplayers_dir, layers_dir)
            for changedfile in changedfiles:
                destfile = os.path.join(basepath, changedfile)
                os.remove(destfile)
                shutil.move(os.path.join(tmpsdk_dir, changedfile), destfile)
            os.remove(os.path.join(conf_dir, 'sdk-conf-manifest'))
            shutil.move(tmpmanifest, conf_dir)

            if not sstate_mirrors:
                # First time through: record the site conf version and point
                # the sstate mirror at the update server
                with open(os.path.join(conf_dir, 'site.conf'), 'a') as f:
                    f.write('SCONF_VERSION = "%s"\n' % site_conf_version)
                    f.write('SSTATE_MIRRORS_append = " file://.* %s/sstate-cache/PATH \\n "\n' % updateserver)
        finally:
            # Always clean up the temporary download area
            shutil.rmtree(tmpsdk_dir)

    if not args.skip_prepare:
        # Find all potentially updateable tasks
        sdk_update_targets = []
        tasks = ['do_populate_sysroot', 'do_packagedata']
        for root, _, files in os.walk(stamps_dir):
            for fn in files:
                # Skip signature-data files; only real stamp files identify
                # executed tasks
                if not '.sigdata.' in fn:
                    for task in tasks:
                        if '.%s.' % task in fn or '.%s_setscene.' % task in fn:
                            sdk_update_targets.append('%s:%s' % (os.path.basename(root), task))
        # Run bitbake command for the whole SDK
        logger.info("Preparing build system... (This may take some time.)")
        try:
            exec_build_env_command(config.init_path, basepath, 'bitbake --setscene-only %s' % ' '.join(sdk_update_targets), stderr=subprocess.STDOUT)
            # Dry-run afterwards: anything still listed as "Running task"
            # could not be satisfied from sstate, i.e. preparation failed
            output, _ = exec_build_env_command(config.init_path, basepath, 'bitbake -n %s' % ' '.join(sdk_update_targets), stderr=subprocess.STDOUT)
            runlines = []
            for line in output.splitlines():
                if 'Running task ' in line:
                    runlines.append(line)
            if runlines:
                logger.error('Unexecuted tasks found in preparation log:\n %s' % '\n '.join(runlines))
                return -1
        except bb.process.ExecutionError as e:
            logger.error('Preparation failed:\n%s' % e.stdout)
            return -1
    return 0
def build_image_task(config, basepath, workspace, image, add_packages=None, task=None, extra_append=None):
    """Build an image (or run a specific task on it), optionally injecting
    workspace/additional packages via a temporary bbappend.

    Returns a tuple (exitcode, outputdir) where outputdir is SDK_DEPLOY for
    populate_sdk* tasks, DEPLOY_DIR_IMAGE otherwise, or None on failure.
    """
    # remove <image>.bbappend to make sure setup_tinfoil doesn't
    # break because of it
    target_basename = config.get('SDK', 'target_basename', '')
    if target_basename:
        appendfile = os.path.join(config.workspace_path, 'appends', '%s.bbappend' % target_basename)
        try:
            os.unlink(appendfile)
        except OSError as exc:
            # Only tolerate a missing file; re-raise any other failure
            if exc.errno != errno.ENOENT:
                raise
    tinfoil = setup_tinfoil(basepath=basepath)
    rd = parse_recipe(config, tinfoil, image, True)
    if not rd:
        # Error already shown
        return (1, None)
    if not bb.data.inherits_class('image', rd):
        raise TargetNotImageError()
    # Get the actual filename used and strip the .bb and full path
    target_basename = rd.getVar('FILE', True)
    target_basename = os.path.splitext(os.path.basename(target_basename))[0]
    # Remember the basename so the bbappend can be cleaned up on the next
    # invocation (see unlink above)
    config.set('SDK', 'target_basename', target_basename)
    config.write()
    appendfile = os.path.join(config.workspace_path, 'appends', '%s.bbappend' % target_basename)
    outputdir = None
    try:
        if workspace or add_packages:
            if add_packages:
                packages = add_packages
            else:
                packages = _get_packages(tinfoil, workspace, config)
        else:
            packages = None
        if not task:
            if not packages and not add_packages and workspace:
                logger.warning('No recipes in workspace, building image %s unmodified', image)
            elif not packages:
                logger.warning('No packages to add, building image %s unmodified', image)
        if packages or extra_append:
            # Write a transient bbappend injecting the extra packages and/or
            # caller-supplied lines into the image recipe
            bb.utils.mkdirhier(os.path.dirname(appendfile))
            with open(appendfile, 'w') as afile:
                if packages:
                    # include packages from workspace recipes into the image
                    afile.write('IMAGE_INSTALL_append = " %s"\n' % ' '.join(packages))
                    if not task:
                        logger.info('Building image %s with the following '
                                    'additional packages: %s', image, ' '.join(packages))
                if extra_append:
                    for line in extra_append:
                        afile.write('%s\n' % line)
        if task in ['populate_sdk', 'populate_sdk_ext']:
            outputdir = rd.getVar('SDK_DEPLOY', True)
        else:
            outputdir = rd.getVar('DEPLOY_DIR_IMAGE', True)
        # Shut tinfoil down before spawning bitbake, which needs the
        # server/lock for itself
        tinfoil.shutdown()
        options = ''
        if task:
            options += '-c %s' % task
        # run bitbake to build image (or specified task)
        try:
            exec_build_env_command(config.init_path, basepath, 'bitbake %s %s' % (options, image), watch=True)
        except ExecutionError as err:
            return (err.exitcode, None)
    finally:
        # Always remove the transient bbappend, built or not
        if os.path.isfile(appendfile):
            os.unlink(appendfile)
    return (0, outputdir)
def deploy(args, config, basepath, workspace):
    """Entry point for the devtool 'deploy-target' subcommand (legacy
    scp-based implementation).

    Copies the recipe's installed output (${D}) to the target machine over
    scp and records the deployed file list so it can be undeployed later.
    Returns 0 on success, non-zero on failure.
    """
    import re
    from devtool import exec_build_env_command
    if not args.recipename in workspace:
        logger.error("no recipe named %s in your workspace" % args.recipename)
        return -1
    # Split an optional ':destdir' suffix off the target; default to '/'
    try:
        host, destdir = args.target.split(':')
    except ValueError:
        destdir = '/'
    else:
        args.target = host
    deploy_dir = os.path.join(basepath, 'target_deploy', args.target)
    deploy_file = os.path.join(deploy_dir, args.recipename + '.list')

    # Ask bitbake for the recipe's environment and pull ${D} (the image
    # install directory) out of it
    stdout, stderr = exec_build_env_command(config.init_path, basepath, 'bitbake -e %s' % args.recipename, shell=True)
    # NOTE(review): re.search returns None if D= is absent from the output,
    # which would raise AttributeError here — confirm bitbake -e always
    # emits D="..."
    recipe_outdir = re.search(r'^D="(.*)"', stdout, re.MULTILINE).group(1)
    if not os.path.exists(recipe_outdir) or not os.listdir(recipe_outdir):
        logger.error('No files to deploy - have you built the %s recipe? If so, the install step has not installed any files.' % args.recipename)
        return -1

    if args.dry_run:
        # List what would be copied, without touching the target
        print('Files to be deployed for %s on target %s:' % (args.recipename, args.target))
        for root, dirs, files in os.walk(recipe_outdir):
            for fn in files:
                print(' %s' % os.path.join(destdir, os.path.relpath(root, recipe_outdir), fn))
        return 0

    if os.path.exists(deploy_file):
        # A previous deployment exists for this recipe: remove it first
        if undeploy(args, config, basepath, workspace):
            # Error already shown
            return -1

    extraoptions = ''
    if args.no_host_check:
        extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
    if not args.show_status:
        extraoptions += ' -q'
    ret = subprocess.call('scp -r %s %s/* %s:%s' % (extraoptions, recipe_outdir, args.target, destdir), shell=True)
    if ret != 0:
        logger.error('Deploy failed - rerun with -s to get a complete error message')
        return ret

    logger.info('Successfully deployed %s' % recipe_outdir)

    if not os.path.exists(deploy_dir):
        os.makedirs(deploy_dir)

    # Record the deployed paths (relative to destdir) for later undeploy
    files_list = []
    for root, _, files in os.walk(recipe_outdir):
        for filename in files:
            filename = os.path.relpath(os.path.join(root, filename), recipe_outdir)
            files_list.append(os.path.join(destdir, filename))

    with open(deploy_file, 'w') as fobj:
        fobj.write('\n'.join(files_list))

    return 0