def run(self, config, args):
    """Run the 'check' step of a recipe (and optionally of its deps).

    Raises FatalError if any required recipe is not built yet or if a
    check fails.
    """
    cookbook = CookBook(config)
    recipe_name = args.recipe[0]
    recipe = cookbook.get_recipe(recipe_name)
    if args.recursive:
        ordered_recipes = cookbook.list_recipe_deps(recipe_name)
    else:
        ordered_recipes = [recipe]
    # All recipes must be built before their checks can run.
    for recipe in ordered_recipes:
        if cookbook.recipe_needs_build(recipe.name):
            raise FatalError(_("Recipe %s is not built yet") % recipe.name)
    for recipe in ordered_recipes:
        # getattr with a default instead of a bare 'except:' that would
        # also swallow KeyboardInterrupt/SystemExit.
        stepfunc = getattr(recipe, 'check', None)
        if stepfunc is None:
            m.message('%s has no check step, skipped' % recipe.name)
            continue
        try:
            stepfunc()
        except FatalError:
            raise
        except Exception as ex:
            raise FatalError(_("Error running %s checks: %s")
                             % (recipe.name, ex))
def __init__(self, force=None, no_deps=None):
    """Build-command argument setup.

    force / no_deps: when None, the corresponding command-line flag is
    exposed; otherwise the given value is used unconditionally.

    Fixes the 'ingoring' typo in the --force help text.
    """
    args = [
        ArgparseArgument('recipe', nargs='*',
                         help=_('name of the recipe to build')),
        ArgparseArgument('--missing-files', action='store_true',
                         default=False,
                         help=_('prints a list of files installed that are '
                                'listed in the recipe')),
        ArgparseArgument('--dry-run', action='store_true',
                         default=False,
                         help=_('only print commands instead of running them '))]
    if force is None:
        args.append(
            ArgparseArgument('--force', action='store_true',
                             default=False,
                             help=_('force the build of the recipe ignoring '
                                    'its cached state')))
    if no_deps is None:
        args.append(
            ArgparseArgument('--no-deps', action='store_true',
                             default=False,
                             help=_('do not build dependencies')))
    self.force = force
    self.no_deps = no_deps
    Command.__init__(self, args)
def _create_packages(self):
    """Create runtime and devel packages for each sub-package.

    Results are recorded in self.packages_paths; sub-packages that
    produce no files are tracked in self.empty_packages.

    Fixes the 'sucessfully' typo in the success message.
    """
    for p in self.packages:
        m.action(_("Creating package %s ") % p)
        packager = OSXPackage(self.config, p, self.store)
        try:
            paths = packager.pack(
                self.output_dir, self.devel, self.force, self.keep_temp,
                self.package.version,
                install_dir=self.package.get_install_dir(),
                include_dirs=self.include_dirs,
                sdk_version=self.package.sdk_version,
            )
            m.action(_("Package created successfully"))
        except EmptyPackageError:
            # An empty package is not fatal; record it below.
            paths = [None, None]
        if paths[0] is not None:
            self.packages_paths[PackageType.RUNTIME][p] = paths[0]
        else:
            self.empty_packages[PackageType.RUNTIME].append(p)
        if paths[1] is not None:
            self.packages_paths[PackageType.DEVEL][p] = paths[1]
        else:
            self.empty_packages[PackageType.DEVEL].append(p)
def run(self, config, args):
    """Create a package (native or tarball) for the given package name.

    Raises PackageNotFoundError for unknown packages and UsageError for
    conflicting flags.
    """
    self.store = PackagesStore(config)
    p = self.store.get_package(args.package[0])
    # Validate the package before doing any work with it: the original
    # checked for None only after already passing p to _build_deps.
    if p is None:
        raise PackageNotFoundError(args.package[0])
    if args.skip_deps_build and args.only_build_deps:
        raise UsageError(_("Cannot use --skip-deps-build together with "
                           "--only-build-deps"))
    if not args.skip_deps_build:
        self._build_deps(config, p, args.no_devel)
    if args.only_build_deps:
        return
    if args.tarball:
        pkg = DistTarball(config, p, self.store)
    else:
        pkg = Packager(config, p, self.store)
    m.action(_("Creating package for %s") % p.name)
    if args.tarball:
        paths = pkg.pack(os.path.abspath(args.output_dir), args.no_devel,
                         args.force, args.keep_temp,
                         split=not args.no_split)
    else:
        paths = pkg.pack(os.path.abspath(args.output_dir), args.no_devel,
                         args.force, args.keep_temp)
    if None in paths:
        paths.remove(None)
    p.post_install(paths)
    m.action(_("Package successfully created in %s") %
             ' '.join([os.path.abspath(x) for x in paths]))
def run(self, config, args):
    """Run the 'check' step for every recipe a package depends on.

    Check failures are collected in ``failed`` and reported as warnings
    instead of aborting on the first error.
    """
    cookbook = CookBook(config)
    failed = []
    p_name = args.package[0]
    store = PackagesStore(config)
    p = store.get_package(p_name)
    # Materialize the recipe list: on Python 3, map() returns a one-shot
    # iterator and the second loop below would otherwise see nothing.
    ordered_recipes = [cookbook.get_recipe(x)
                       for x in p.recipes_dependencies()]
    for recipe in ordered_recipes:
        if cookbook.recipe_needs_build(recipe.name):
            raise CommandError(_("Recipe %s is not built yet") % recipe.name)
    for recipe in ordered_recipes:
        # getattr with a default instead of a bare 'except:'.
        stepfunc = getattr(recipe, 'check', None)
        if stepfunc is None:
            m.message('%s has no check step, skipped' % recipe.name)
            continue
        try:
            m.message('Running checks for recipe %s' % recipe.name)
            stepfunc()
        except Exception as ex:
            # Python 3 'as' syntax (original 'except Exception, ex' is
            # a SyntaxError on Python 3).
            failed.append(recipe.name)
            m.warning(_("%s checks failed: %s") % (recipe.name, ex))
def fetch(self, cookbook, recipes, no_deps, reset_rdeps):
    """Fetch sources for the given recipes (all recipes when none are
    given), resetting the build status of recipes whose version changed
    and, optionally, of their reverse dependencies."""
    if not recipes:
        fetch_recipes = cookbook.get_recipes_list()
    elif no_deps:
        fetch_recipes = [cookbook.get_recipe(x) for x in recipes]
    else:
        with_deps = []
        for name in recipes:
            with_deps += cookbook.list_recipe_deps(name)
        fetch_recipes = remove_list_duplicates(with_deps)
    m.message(_("Fetching the following recipes: %s") %
              ' '.join([x.name for x in fetch_recipes]))
    to_rebuild = []
    total = len(fetch_recipes)
    for idx, recipe in enumerate(fetch_recipes):
        m.build_step(idx + 1, total, recipe, 'Fetch')
        recipe.fetch()
        built_version = cookbook.recipe_built_version(recipe.name)
        current_version = recipe.built_version()
        if built_version != current_version:
            # Sources changed: this recipe (and optionally everything
            # depending on it) must be rebuilt.
            to_rebuild.append(recipe)
            cookbook.reset_recipe_status(recipe.name)
            if reset_rdeps:
                for rdep in cookbook.list_recipe_reverse_deps(recipe.name):
                    to_rebuild.append(rdep)
                    cookbook.reset_recipe_status(rdep.name)
    if to_rebuild:
        m.message(_("These recipes have been updated and will "
                    "be rebuilt:\n%s") %
                  '\n'.join([x.name for x in to_rebuild]))
def _cook_recipe(self, recipe, count, total):
    """Build one recipe, running its remaining steps in order.

    count/total are only used for progress display.  Step errors of type
    FatalError are delegated to _handle_build_step_error; anything else
    aborts with a BuildStepError carrying the traceback.
    """
    # Nothing to do when the recipe is up to date and no rebuild forced.
    if not self.cookbook.recipe_needs_build(recipe.name) and \
            not self.force:
        m.build_step(count, total, recipe.name, _("already built"))
        return
    if self.missing_files:
        # create a temp file that will be used to find newer files
        tmp = tempfile.NamedTemporaryFile()
    recipe.force = self.force
    for desc, step in recipe.steps:
        m.build_step(count, total, recipe.name, step)
        # check if the current step needs to be done
        if self.cookbook.step_done(recipe.name, step) and not self.force:
            m.action(_("Step done"))
            continue
        try:
            # call step function
            stepfunc = getattr(recipe, step)
            if not stepfunc:
                raise FatalError(_('Step %s not found') % step)
            stepfunc()
            # update status successfully
            self.cookbook.update_step_status(recipe.name, step)
        except FatalError:
            self._handle_build_step_error(recipe, step)
        except Exception:
            raise BuildStepError(recipe, step, traceback.format_exc())
    # Record the built version so future runs can detect source changes.
    self.cookbook.update_build_status(recipe.name, recipe.built_version())
    if self.missing_files:
        # Files newer than 'tmp' were installed by this recipe.
        self._print_missing_files(recipe, tmp)
def fetch(self, redownload=False):
    """Fetch the tarball, preferring the local sources cache over the
    network, and falling back to the mirror URL if the main download
    fails.

    redownload: when True, ignore cached/existing files and download
    again.  Raises FatalError in offline mode when no local file exists.
    """
    if not os.path.exists(self.repo_dir):
        os.makedirs(self.repo_dir)
    cached_file = os.path.join(self.config.cached_sources,
                               self.package_name, self.tarball_name)
    # Reuse a cached tarball only if its checksum verifies.
    if not redownload and os.path.isfile(cached_file) and \
            self.verify(cached_file, fatal=False):
        m.action(_('Copying cached tarball from %s to %s instead of %s') %
                 (cached_file, self.download_path, self.url))
        shutil.copy(cached_file, self.download_path)
        return
    if self.offline:
        # No network allowed: only a previously downloaded file will do.
        if not os.path.isfile(self.download_path):
            msg = 'Offline mode: tarball {!r} not found in cached sources ({}) or local sources ({})'
            raise FatalError(msg.format(self.tarball_name,
                                        self.config.cached_sources,
                                        self.repo_dir))
        self.verify()
        m.action(_('Found tarball for %s at %s') %
                 (self.url, self.download_path))
        return
    m.action(_('Fetching tarball %s to %s') %
             (self.url, self.download_path))
    # Enable certificate checking only on Linux for now
    # FIXME: Add more platforms here after testing
    cc = self.config.platform == Platform.LINUX
    try:
        shell.download(self.url, self.download_path, check_cert=cc,
                       overwrite=redownload)
    except (FatalError, urllib.error.URLError):
        # Try our mirror
        shell.download(self.mirror_url, self.download_path, check_cert=cc,
                       overwrite=redownload)
    self.verify()
def __init__(self):
    """Argument definitions for the 'package' command.

    Fixes the 'meaningfull' typo in the --no-split help text.
    """
    Command.__init__(self, [
        ArgparseArgument('package', nargs=1,
                         help=_('name of the package to create')),
        ArgparseArgument('-o', '--output-dir', default='.',
                         help=_('Output directory for the tarball file')),
        ArgparseArgument('-t', '--tarball', action='store_true',
                         default=False,
                         help=_('Creates a tarball instead of a native package')),
        ArgparseArgument('-n', '--no-split', action='store_true',
                         default=False,
                         help=_('(only meaningful when --tarball is set) Create one single '
                                'tarball with devel and runtime files')),
        ArgparseArgument('-f', '--force', action='store_true',
                         default=False,
                         help=_('Delete any existing package file')),
        ArgparseArgument('-d', '--no-devel', action='store_false',
                         default=True,
                         help=_('Do not create the development version '
                                'of this package')),
        ArgparseArgument('-s', '--skip-deps-build', action='store_true',
                         default=False,
                         help=_('Do not build the recipes needed to '
                                'create this package (conflicts with --only-build-deps)')),
        ArgparseArgument('-b', '--only-build-deps', action='store_true',
                         default=False,
                         help=_('Only build the recipes needed to '
                                'create this package (conflicts with --skip-deps-build)')),
        ArgparseArgument('-k', '--keep-temp', action='store_true',
                         default=False,
                         help=_('Keep temporary files for debug')),
        ArgparseArgument('--offline', action='store_true',
                         default=False,
                         help=_('Use only the source cache, no network')),
    ])
def run(self, config, args):
    """Tag the git repositories of one recipe (or all of them when
    args.recipe is 'all') with args.tagname.

    Recipes whose sources are not git-based are skipped; existing tags
    are only replaced when --force is given.
    """
    cookbook = CookBook(config)
    if args.recipe == 'all':
        recipes = cookbook.get_recipes_list()
    else:
        recipes = [cookbook.get_recipe(args.recipe)]
    if len(recipes) == 0:
        m.message(_("No recipes found"))
    tagname = args.tagname
    tagdescription = args.tagdescription
    force = args.force
    for recipe in recipes:
        # Only git-backed recipes can be tagged.
        if recipe.stype != SourceType.GIT and \
                recipe.stype != SourceType.GIT_TARBALL:
            m.message(_("Recipe '%s' has a custom source repository, "
                        "skipping") % recipe.name)
            continue
        # Ensure the repo exists locally, but don't switch branches.
        recipe.fetch(checkout=False)
        tags = git.list_tags(recipe.repo_dir)
        exists = (tagname in tags)
        if exists:
            if not force:
                m.warning(_("Recipe '%s' tag '%s' already exists, "
                            "not updating" % (recipe.name, tagname)))
                continue
            # --force: drop the old tag before re-creating it.
            git.delete_tag(recipe.repo_dir, tagname)
        # Tag the tip of the release branch for this recipe's version.
        commit = 'origin/sdk-%s' % recipe.version
        git.create_tag(recipe.repo_dir, tagname, tagdescription,
                       commit)
def run(self, config, args):
    """Wipe the build system back to its initial state, prompting twice
    for confirmation unless --force was given."""
    paths = [os.path.join(CONFIG_DIR, config.cache_file),
             config.prefix,
             config.sources]
    if (args.build_tools):
        paths += [os.path.join(CONFIG_DIR, config.build_tools_cache),
                  config.build_tools_prefix,
                  config.build_tools_sources]
    if args.force:
        self.wipe(paths)
        return
    options = ['yes', 'no']
    msg = _("WARNING!!!\n"
            "This command will delete cerbero's build cache, "
            "the sources directory and the builds directory "
            "to reset the build system to its initial state.\n"
            "The following paths will be removed:\n%s\n"
            "Do you want to continue?" % '\n'.join(paths))
    # Ask once
    if shell.prompt(msg, options) == options[0]:
        msg = _("Are you sure?")
        # Ask twice
        if shell.prompt(msg, options) == options[0]:
            # Start with the Apocalypse
            self.wipe(paths)
def __init__(self):
    """Arguments for the recipe 'check' command."""
    arguments = [
        ArgparseArgument('recipe', nargs=1,
                         help=_('name of the recipe to run checks on')),
        ArgparseArgument('--recursive', action='store_true', default=False,
                         help=_('Recursively run checks on dependencies')),
    ]
    Command.__init__(self, arguments)
def _create_package(self, config, p, args):
    """Create package 'p', write a .sha1 checksum file next to each
    produced file, and return the list of produced paths."""
    if args.type == 'native':
        pkg = Packager(config, p, self.store)
    else:
        pkg = DistArchive(config, p, self.store, args.type)
    m.action(_("Creating package for %s") % p.name)
    p.pre_package()
    paths = pkg.pack(os.path.abspath(args.output_dir), not args.no_devel,
                     args.force, args.keep_temp)
    if None in paths:
        paths.remove(None)
    if '' in paths:
        paths.remove('')
    paths = p.post_package(paths) or paths
    # Dedicated loop variable: the original reused 'p' here, shadowing
    # the package argument for the rest of the function.
    for path in paths:
        BUF_SIZE = 65536  # 64kb chunks
        sha1 = hashlib.sha1()
        with open(os.path.abspath(path), 'rb') as f:
            while True:
                data = f.read(BUF_SIZE)
                if not data:
                    break
                sha1.update(data)
        sha1sum = sha1.hexdigest()
        m.action(_("Package successfully created in %s %s") %
                 (os.path.abspath(path), sha1sum))
        # Generate the sha1sum file
        with open('%s.sha1' % path, 'w+') as sha1file:
            sha1file.write(sha1sum)
    return paths
def pack(self, output_dir, devel=True, force=False, keep_temp=False,
         split=True, package_prefix=""):
    """Create the runtime (and optionally devel) tarballs.

    Returns the list of created tarball paths.  Raises EmptyPackageError
    when the package contains no files at all.
    """
    try:
        dist_files = self.files_list(PackageType.RUNTIME, force)
    except EmptyPackageError:
        m.warning(_("The runtime package is empty"))
        dist_files = []
    if devel:
        try:
            devel_files = self.files_list(PackageType.DEVEL, force)
        except EmptyPackageError:
            m.warning(_("The development package is empty"))
            devel_files = []
    else:
        devel_files = []
    if not split:
        # Single-tarball mode: merge devel files into the runtime list.
        dist_files += devel_files
    if not dist_files and not devel_files:
        raise EmptyPackageError(self.package.name)
    filenames = []
    if dist_files:
        runtime = self._create_tarball(output_dir, PackageType.RUNTIME,
                                       dist_files, force, package_prefix)
        filenames.append(runtime)
    if split and devel and len(devel_files) != 0:
        # Use a dedicated name: the original rebound the 'devel' flag
        # parameter to the tarball path here.
        devel_path = self._create_tarball(output_dir, PackageType.DEVEL,
                                          devel_files, force,
                                          package_prefix)
        filenames.append(devel_path)
    return filenames
def __init__(self, args=None):
    """Shared arguments for fetch-style commands.

    args: optional list of extra ArgparseArguments.  A fresh list is
    created when omitted: the original used a mutable default list that
    accumulated arguments across instantiations.
    """
    if args is None:
        args = []
    args.append(ArgparseArgument('--reset-rdeps', action='store_true',
                default=False, help=_('reset the status of reverse '
                                      'dependencies too')))
    args.append(ArgparseArgument('--full-reset', action='store_true',
                default=False,
                help=_('reset to extract step if rebuild is needed')))
    Command.__init__(self, args)
def __new__(klass, config, build_tools_only):
    """Factory returning the list of bootstrappers to run.

    Always includes the build-tools bootstrapper; unless
    build_tools_only is set, distro bootstrappers for the build host
    and (when cross-compiling) the target are prepended.
    """
    result = [BuildTools(config)]
    if build_tools_only:
        return result
    # Try to find a bootstrapper for the distro-distro_version combination,
    # both for the target host and the build one. For instance, when
    # bootstraping to cross-compile for windows we also need to bootstrap
    # the build host.
    target = (config.target_distro, config.target_distro_version)
    build = (config.distro, config.distro_version)
    combinations = [target] if target == build else [target, build]
    for d, v in combinations:
        if d not in bootstrappers:
            raise FatalError(_("No bootstrapper for the distro %s" % d))
        if v not in bootstrappers[d]:
            # Be tolerant with the distro version
            m.warning(_("No bootstrapper for the distro version %s" % v))
            v = None
        result.insert(0, bootstrappers[d][v](config))
    return result
def _cook_recipe(self, recipe, count, total):
    """Build one recipe, logging each step to its own logfile.

    Fixes the Python 2 ``except FatalError, e`` syntax, which is a
    SyntaxError on Python 3.
    """
    if not self.cookbook.recipe_needs_build(recipe.name) and \
            not self.force:
        m.build_step(count, total, recipe.name, _("already built"))
        return
    if self.missing_files:
        # create a temp file that will be used to find newer files
        tmp = tempfile.NamedTemporaryFile()
    recipe.force = self.force
    for desc, step in recipe.steps:
        m.build_step(count, total, recipe.name, step)
        # check if the current step needs to be done
        if self.cookbook.step_done(recipe.name, step) and not self.force:
            m.action(_("Step done"))
            continue
        try:
            # call step function
            stepfunc = getattr(recipe, step)
            if not stepfunc:
                raise FatalError(_('Step %s not found') % step)
            shell.set_logfile_output("%s/%s-%s.log" %
                                     (recipe.config.logs, recipe, step))
            stepfunc()
            # update status successfully
            self.cookbook.update_step_status(recipe.name, step)
            shell.close_logfile_output()
        except FatalError as e:
            shell.close_logfile_output(dump=True)
            self._handle_build_step_error(recipe, step, e.arch)
        except Exception:
            shell.close_logfile_output(dump=True)
            raise BuildStepError(recipe, step, traceback.format_exc())
def __init__(self):
    """Arguments for the package-info command."""
    arguments = [
        ArgparseArgument('package', nargs=1,
                         help=_('name of the package')),
        ArgparseArgument('-l', '--list-files', action='store_true',
                         default=False,
                         help=_('List all files installed by this package')),
    ]
    Command.__init__(self, arguments)
def __init__(self):
    """Arguments for the Visual Studio props generation command."""
    arguments = [
        ArgparseArgument('-o', '--output_dir', default='.',
                         help=_('output directory where .vsprops files will be saved')),
        ArgparseArgument('-p', '--prefix', default=DEFAULT_PREFIX_MACRO,
                         help=_('name of the prefix environment variable '
                                '(eg:CERBERO_SDK_ROOT_X86)')),
    ]
    Command.__init__(self, arguments)
def __init__(self):
    """Arguments for the fetch-package command."""
    args = [
        ArgparseArgument('package', nargs=1,
                         help=_('package to fetch')),
        # NOTE(review): action='store_false' means passing --deps sets
        # the value to False, which contradicts the help text — confirm
        # whether the flag semantics or the help string is intended.
        ArgparseArgument('--deps', action='store_false',
                         default=True,
                         help=_('also fetch dependencies')),
    ]
    Fetch.__init__(self, args)
def __init__(self):
    """Arguments for the recipe dependency-listing command."""
    arguments = [
        ArgparseArgument('recipe', nargs=1,
                         help=_('name of the recipe')),
        ArgparseArgument('--all', action='store_true', default=False,
                         help=_('list all dependencies, including the '
                                'build ones')),
    ]
    Command.__init__(self, arguments)
def create_parser(self): ''' Creates the arguments parser ''' # argparse.ArgumentParser() # Create a new ArgumentParser object, with description init https://docs.python.org/2/library/argparse.html?highlight=argparse.argumentparser#argparse.ArgumentParser.add_argument self.parser = argparse.ArgumentParser(description=_(description)) # Define how a single comand-line argument should be parsed. self.parser.add_argument('-c', '--config', action='append', type=str, default=None, help=_('Configuration file used for the build'))
def __init__(self):
    """Arguments for the shell passthrough command."""
    arguments = [
        ArgparseArgument('cmd', nargs='+', help=_('command to run')),
        ArgparseArgument('-v', '--verbose', action='store_true',
                         default=False, help=_('verbose mode')),
    ]
    Command.__init__(self, arguments)
def __init__(self):
    """Arguments for the fetch-recipes command."""
    arguments = [
        ArgparseArgument('recipes', nargs='*',
                         help=_('list of the recipes to fetch (fetch all if none '
                                'is passed)')),
        ArgparseArgument('--no-deps', action='store_true', default=False,
                         help=_('do not fetch dependencies')),
    ]
    Fetch.__init__(self, arguments)
def __init__(self):
    """Common arguments for packaging-related commands."""
    arguments = [
        ArgparseArgument('--dry-run', action='store_true', default=False,
                         help=_('only print commands instead of running them ')),
        ArgparseArgument('-f', '--force', action='store_true', default=False,
                         help=_('Force the creation of the binary package')),
    ]
    Command.__init__(self, arguments)
def __init__(self):
    """Arguments for the wipe command."""
    arguments = [
        ArgparseArgument('--force', action='store_true', default=False,
                         help=_('force the deletion of everything without user '
                                'input')),
        ArgparseArgument('--build-tools', action='store_true', default=False,
                         help=_('wipe the build tools too')),
    ]
    Command.__init__(self, arguments)
def __init__(self):
    """Arguments for the xcconfig generation command."""
    arguments = [
        ArgparseArgument('-o', '--output_dir', default='.',
                         help=_('output directory where .xcconfig files will be saved')),
        ArgparseArgument('-f', '--filename', default=None,
                         help=_('filename of the .xcconfig file')),
        ArgparseArgument('libraries', nargs='*',
                         help=_('List of libraries to include')),
    ]
    Command.__init__(self, arguments)
def __init__(self):
    """Arguments for the SDK shell-script generation command.

    Fixes the 'scrips' typo in the 'name' help text.
    """
    Command.__init__(
        self,
        [
            ArgparseArgument("name", nargs=1, default="sdk-shell",
                             help=_("name of the scripts")),
            ArgparseArgument("-o", "--output-dir", default=".",
                             help=_("output directory")),
            ArgparseArgument("-p", "--prefix",
                             help=_("prefix of the SDK")),
            ArgparseArgument("--cmd", default=self.DEFAULT_CMD,
                             help=_("command to run in the script")),
        ],
    )
def __init__(self):
    """Arguments for the SDK shell-script generation command.

    Fixes the 'scrips' typo in the 'name' help text.
    """
    Command.__init__(self, [
        ArgparseArgument('name', nargs=1, default='sdk-shell',
                         help=_('name of the scripts')),
        ArgparseArgument('-o', '--output-dir', default='.',
                         help=_('output directory')),
        ArgparseArgument('-p', '--prefix',
                         help=_('prefix of the SDK')),
        ArgparseArgument('--cmd', default=self.DEFAULT_CMD,
                         help=_('command to run in the script')),
    ])
def __init__(self, args=None):
    """Arguments for the source-bundle command.

    The ``args`` parameter was never used (it was immediately
    overwritten); it is kept, with a safe None default instead of a
    mutable list, for interface compatibility.  Also fixes the
    'bootstrep' typo in the --no-bootstrap help text.
    """
    args = [
        ArgparseArgument('bundlepackages', nargs='+',
                         help=_('packages to bundle')),
        ArgparseArgument('--add-recipe', action='append', default=[],
                         help=_('additional recipes to bundle')),
        ArgparseArgument('--no-bootstrap', action='store_true',
                         default=False,
                         help=_('Don\'t include bootstrap sources')),
    ]
    Command.__init__(self, args)
def __init__(self, config, load=True):
    """Initialize the cookbook, optionally restoring the cached state
    and scanning the recipes directory.

    Raises FatalError when the configured recipes dir does not exist.
    """
    self.set_config(config)
    # recipe_name -> recipe
    self.recipes = {}
    # recipe -> error
    self._invalid_recipes = {}
    self._mtimes = {}
    if load:
        self._restore_cache()
        if not os.path.exists(config.recipes_dir):
            raise FatalError(_("Recipes dir %s not found")
                             % config.recipes_dir)
        self.update()
async def fetch(self, redownload=False):
    """Fetch the tarball, copying it from the local sources cache when a
    verified copy exists, otherwise deferring to the parent class.

    redownload: when True, skip the cache and download again.
    """
    fname = self._get_download_path()
    if not os.path.exists(self.download_dir):
        os.makedirs(self.download_dir)
    cached_file = os.path.join(self.config.cached_sources,
                               self.package_name, self.tarball_name)
    # Only reuse the cached file when its checksum verifies.
    if not redownload and os.path.isfile(cached_file) and self.verify(
            cached_file, fatal=False):
        m.action(_('Copying cached tarball from %s to %s instead of %s') %
                 (cached_file, fname, self.url), logfile=get_logfile(self))
        shutil.copy(cached_file, fname)
        return
    await super().fetch(redownload=redownload)
def __init__(self):
    """Arguments for the bootstrap command.

    Fixes the 'boostrap' typo and wraps all help strings in _() like
    the rest of the commands in this file.
    """
    args = [
        ArgparseArgument('--build-tools-only', action='store_true',
                         default=False,
                         help=_('only bootstrap the build tools')),
        ArgparseArgument('--system-only', action='store_true',
                         default=False,
                         help=_('only bootstrap the system')),
        ArgparseArgument('--offline', action='store_true',
                         default=False,
                         help=_('Use only the source cache, no network')),
        ArgparseArgument('-y', '--assume-yes', action='store_true',
                         default=False,
                         help=_('Automatically say yes to prompts and run '
                                'non-interactively')),
    ]
    Command.__init__(self, args)
def start(self):
    """Download and unpack the Android NDK into the toolchain prefix."""
    dest = self.config.toolchain_prefix
    ndk_zip = self.NDK_ZIP % (self.config.platform, self.config.arch)
    zip_file = os.path.join(dest, ndk_zip)
    # exist_ok replaces the original bare 'except: pass', which also hid
    # unrelated OS errors (e.g. permission problems).
    os.makedirs(dest, exist_ok=True)
    shell.download("%s/%s" % (self.NDK_BASE_URL, ndk_zip), zip_file)
    if not os.path.exists(os.path.join(dest, "ndk-build")):
        try:
            shell.call('unzip %s' % ndk_zip, dest)
            shell.call('mv android-ndk-%s/* .' % self.NDK_VERSION, dest)
        except Exception as ex:
            # Python 3 'as' syntax (original 'except Exception, ex' is a
            # SyntaxError on Python 3).
            raise FatalError(_("Error installing Android NDK: %s") % (ex))
def _parse(self, filename, reset=True):
    """Parse a config file into this object's properties.

    reset: when False, current property values are pre-seeded so the
    parsed file can override only some of them.

    Raises ConfigurationError when the file cannot be parsed.
    """
    config = {'os': os, '__file__': filename}
    if not reset:
        for prop in self._properties:
            if hasattr(self, prop):
                config[prop] = getattr(self, prop)
    try:
        parse_file(filename, config)
    except Exception:
        # Narrowed from a bare 'except:', which would also swallow
        # KeyboardInterrupt/SystemExit.
        raise ConfigurationError(
            _('Could not include config file (%s)') % filename)
    for key in self._properties:
        if key in config:
            self.set_property(key, config[key], True)
def run(self, config, args):
    """Run the 'check' step of a recipe (and optionally its deps),
    awaiting coroutine check functions on the event loop."""
    cookbook = CookBook(config)
    recipe_name = args.recipe[0]
    recipe = cookbook.get_recipe(recipe_name)
    if args.recursive:
        ordered_recipes = cookbook.list_recipe_deps(recipe_name)
    else:
        ordered_recipes = [recipe]
    for recipe in ordered_recipes:
        if cookbook.recipe_needs_build(recipe.name):
            raise FatalError(_("Recipe %s is not built yet") % recipe.name)
    for recipe in ordered_recipes:
        # getattr with a default instead of a bare 'except:' that would
        # also swallow KeyboardInterrupt/SystemExit.
        stepfunc = getattr(recipe, 'check', None)
        if stepfunc is None:
            m.message('%s has no check step, skipped' % recipe.name)
            continue
        try:
            if asyncio.iscoroutinefunction(stepfunc):
                # NOTE(review): the recipe is passed explicitly even
                # though stepfunc is fetched from the recipe itself —
                # confirm the coroutine check step really expects the
                # recipe as an argument.
                loop = asyncio.get_event_loop()
                loop.run_until_complete(stepfunc(recipe))
            else:
                stepfunc()
        except FatalError as e:
            raise e
        except Exception as ex:
            raise FatalError(
                _("Error running %s checks: %s") % (recipe.name, ex))
def run(self, config, args):
    """Fetch the CI build cache matching the current git commit."""
    if not config.uninstalled:
        raise FatalError(
            _("fetch-cache is only available with "
              "cerbero-uninstalled"))
    git_dir = os.path.abspath(os.path.dirname(sys.argv[0]))
    sha = git.get_hash(git_dir, args.commit)
    deps = self.get_deps(config, args)
    if not args.skip_fetch:
        matching = self.find_dep(deps, sha)
        if matching:
            self.fetch_dep(config, matching, args.namespace)
    if args.job_id:
        self.update_log(config, args, deps, sha)
def __init__(self):
    """Resolve url/tarball name templates and compute the download path.

    Raises InvalidRecipeError when the recipe defines no 'url'.
    """
    Source.__init__(self)
    if not self.url:
        raise InvalidRecipeError(
            _("'url' attribute is missing in the recipe"))
    self.url = self.replace_name_and_version(self.url)
    if self.tarball_name is None:
        # Default to the last path component of the URL.
        self.tarball_name = os.path.basename(self.url)
    else:
        self.tarball_name = self.replace_name_and_version(self.tarball_name)
    if self.tarball_dirname is not None:
        self.tarball_dirname = self.replace_name_and_version(
            self.tarball_dirname)
    self.download_path = os.path.join(self.repo_dir, self.tarball_name)
def verify(self, fname, fatal=True):
    """Verify fname against the recipe's tarball checksum.

    Returns True on success.  On mismatch the file is moved aside to
    '<fname>.failed-checksum', then either False is returned
    (fatal=False) or FatalError is raised.
    """
    checksum = self._checksum(fname)
    if self.tarball_checksum is None:
        # A missing checksum in the recipe is always fatal, even with
        # fatal=False: the recipe itself must be fixed.
        raise FatalError('tarball_checksum is missing in {}.recipe for tarball {}\n'
                         'The SHA256 of the current file is {}\nPlease verify and '
                         'add it to the recipe'.format(self.name, self.url, checksum))
    if checksum != self.tarball_checksum:
        # Move the bad file aside so a re-download isn't blocked by it.
        movedto = fname + '.failed-checksum'
        os.replace(fname, movedto)
        m.action(_('Checksum failed, tarball %s moved to %s') % (fname, movedto),
                 logfile=get_logfile(self))
        if not fatal:
            return False
        raise FatalError('Checksum for {} is {!r} instead of {!r}'
                         .format(fname, checksum, self.tarball_checksum))
    return True
def check_call(cmd, cmd_dir=None, shell=False, split=True, fail=False):
    """Run a command and return its stdout (as bytes).

    cmd: command string; split with shlex unless split=False
    cmd_dir: working directory for the command
    shell: run the command through the shell
    fail: when True, raise if the process exits non-zero

    Raises FatalError when the command cannot be run (or fails while
    fail=True).
    """
    try:
        if split:
            cmd = shlex.split(cmd)
        # subprocess.DEVNULL instead of open(os.devnull): the original
        # opened devnull in *read* mode for stderr and leaked the handle.
        process = subprocess.Popen(cmd, cwd=cmd_dir,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.DEVNULL,
                                   shell=shell)
        output, unused_err = process.communicate()
        if process.poll() and fail:
            raise Exception()
    except Exception:
        raise FatalError(_("Error running command: %s") % cmd)
    return output
def __init__(self):
    """Expand URL templates and set the download directory.

    Raises InvalidRecipeError when the recipe defines no 'url'.
    """
    Source.__init__(self)
    if not self.url:
        raise InvalidRecipeError(
            _("'url' attribute is missing in the recipe"))
    self.url = self.expand_url_template(self.url)
    self.url = self.replace_name_and_version(self.url)
    if self.tarball_name is not None:
        self.tarball_name = self.replace_name_and_version(self.tarball_name)
    if self.tarball_dirname is not None:
        self.tarball_dirname = self.replace_name_and_version(
            self.tarball_dirname)
    self.download_dir = self.repo_dir
    BaseTarball.__init__(self)
def _find_deps(self, recipe, state={}, ordered=[]): if state.get(recipe, 'clean') == 'processed': return if state.get(recipe, 'clean') == 'in-progress': raise FatalError(_("Dependency Cycle: {0}".format(recipe.name))) state[recipe] = 'in-progress' recipe_deps = recipe.list_deps() if not recipe.runtime_dep: recipe_deps = self._runtime_deps() + recipe_deps for recipe_name in recipe_deps: try: recipedep = self.get_recipe(recipe_name) except RecipeNotFoundError as e: raise FatalError( _("Recipe %s has a unknown dependency %s" % (recipe.name, recipe_name))) try: self._find_deps(recipedep, state, ordered) except FatalError: m.error('Error finding deps of "{0}"'.format(recipe.name)) raise state[recipe] = 'processed' ordered.append(recipe) return ordered
def call(cmd, cmd_dir='.', fail=True, env=None, verbose=False):
    '''
    Run a shell command

    @param cmd: the command to run
    @type cmd: str
    @param cmd_dir: directory where the command will be run
    @type cmd_dir: str
    @param fail: whether or not to raise an exception if the command fails
    @type fail: bool
    '''
    try:
        # Echo the command: to stdout only when verbose and no logfile
        # is active, otherwise into the active logfile.
        if LOGFILE is None:
            if verbose:
                m.message("Running command '%s'" % cmd)
        else:
            LOGFILE.write("Running command '%s'\n" % cmd)
        shell = True
        if PLATFORM == Platform.WINDOWS:
            # windows do not understand ./
            if cmd.startswith('./'):
                cmd = cmd[2:]
            # run all processes through sh.exe to get scripts working
            cmd = '%s "%s"' % ('sh -c', cmd)
            # fix paths with backslashes
            cmd = _fix_mingw_cmd(cmd)
            # Disable shell which uses cmd.exe
            shell = False
        stream = LOGFILE or sys.stdout
        if DRY_RUN:
            # write to stderr so it's filtered more easily
            m.error("cd %s && %s && cd %s" % (cmd_dir, cmd, os.getcwd()))
            ret = 0
        else:
            if not env:
                env = os.environ.copy()
            # stderr is merged into stdout so the stream captures both.
            ret = subprocess.check_call(cmd, cwd=cmd_dir,
                                        stderr=subprocess.STDOUT,
                                        stdout=StdOut(stream),
                                        env=env, shell=shell)
    except subprocess.CalledProcessError:
        if fail:
            raise FatalError(_("Error running command: %s") % cmd)
        else:
            # fail=False: swallow the failure deliberately.
            ret = 0
    return ret
def download_curl(url, destination=None, recursive=False, check_cert=True,
                  user=None, password=None, overwrite=False):
    '''
    Downloads a file with cURL

    @param url: url to download
    @type url: str
    @param destination: destination where the file will be saved
    @type destination: str
    @param user: the username to use when connecting
    @type user: str
    @param password: the password to use when connecting
    @type password: str
    @param overwrite: in case the file exists overwrite it
    @type overwrite: bool
    '''
    path = None
    if recursive:
        raise FatalError(_("cURL doesn't support recursive downloads"))
    cmd = "curl -L "
    if user:
        cmd += "--user %s" % user
        if password:
            cmd += ":%s " % password
        else:
            cmd += " "
    if not check_cert:
        cmd += "-k "
    if destination is not None:
        cmd += "%s -o %s " % (url, destination)
    else:
        cmd += "-O %s " % url
    # Guard on destination: os.path.exists(None) raises TypeError.
    if destination is not None and not overwrite \
            and os.path.exists(destination):
        logging.info("File %s already downloaded." % destination)
    else:
        logging.info("Downloading %s", url)
        try:
            call(cmd, path)
        except FatalError as e:
            # Python 3 'as' syntax (original 'except FatalError, e' is a
            # SyntaxError on Python 3).  Don't leave a partial download
            # behind, and don't call os.remove(None).
            if destination is not None and os.path.exists(destination):
                os.remove(destination)
            raise e
def pack_deps(self, output_dir, tmpdir, force):
    """Pack every dependency of this package, skipping ones already
    packed (stamp file present) and recording empty ones."""
    for dep_pkg in self.store.get_package_deps(self.package.name):
        stamp_path = os.path.join(tmpdir, dep_pkg.name + '-stamp')
        if os.path.exists(stamp_path):
            # already built, skipping
            continue
        m.action(_('Packing dependency %s for package %s') %
                 (dep_pkg.name, self.package.name))
        packager = self.__class__(self.config, dep_pkg, self.store)
        try:
            packager.pack(output_dir, self.devel, force, True, True, tmpdir)
        except EmptyPackageError:
            self._empty_packages.append(dep_pkg)
def extract(self):
    """Extract the downloaded tarball into the build dir and apply the
    recipe's patches on top of a fresh git directory."""
    m.action(_('Extracting tarball to %s') % self.build_dir)
    # Always start from a clean tree.
    if os.path.exists(self.build_dir):
        shutil.rmtree(self.build_dir)
    shell.unpack(self.download_path, self.config.sources)
    if self.tarball_dirname is not None:
        # The tarball extracts to a differently named directory; move it
        # to the expected build dir.
        os.rename(os.path.join(self.config.sources, self.tarball_dirname),
                  self.build_dir)
    git.init_directory(self.build_dir)
    for patch in self.patches:
        if not os.path.isabs(patch):
            patch = self.relative_path(patch)
        # NOTE(review): only strip == 1 goes through git; any other
        # strip level uses shell's patch — confirm this hardcoded
        # special case is intended.
        if self.strip == 1:
            git.apply_patch(patch, self.build_dir)
        else:
            shell.apply_patch(patch, self.build_dir, self.strip)
def start(self):
    """Download the self-extracting Android NDK and install it into the
    toolchain prefix."""
    dest = self.config.toolchain_prefix
    ndk_tar = self.NDK_TAR % (self.config.platform, self.config.arch)
    tar = os.path.join(dest, ndk_tar)
    # exist_ok replaces the original bare 'except: pass', which also hid
    # unrelated OS errors.
    os.makedirs(dest, exist_ok=True)
    shell.download("%s/%s" % (self.NDK_BASE_URL, ndk_tar), tar)
    if not os.path.exists(os.path.join(dest, "README.TXT")):
        try:
            shell.call('chmod +x ./%s' % ndk_tar, dest)
            shell.call('./%s' % ndk_tar, dest)
            shell.call('mv android-ndk-%s/* .' % (self.NDK_VERSION), dest)
        except Exception as ex:
            # Python 3 'as' syntax (original 'except Exception, ex' is a
            # SyntaxError on Python 3).
            raise FatalError(_("Error installing Android NDK: %s") % (ex))
def fetch(self):
    """Populate repo_dir from the cached sources when available,
    otherwise check out from the remote svn repository."""
    if os.path.exists(self.repo_dir):
        # Start from scratch on every fetch.
        shutil.rmtree(self.repo_dir)
    cached_dir = os.path.join(self.config.cached_sources, self.package_name)
    use_cache = os.path.isdir(os.path.join(cached_dir, ".svn"))
    if use_cache:
        m.action(_('Copying cached repo from %s to %s instead of %s') %
                 (cached_dir, self.repo_dir, self.url))
        shell.copy_dir(cached_dir, self.repo_dir)
    else:
        os.makedirs(self.repo_dir)
        svn.checkout(self.url, self.repo_dir)
        svn.update(self.repo_dir, self.revision)
def request(self, url, values, token=None):
    """GET 'url' with 'values' encoded as the query string and return
    the parsed JSON response.

    token, when given, is sent as the Private-Token header.
    Raises FatalError on URL errors.
    """
    headers = {"Private-Token": token} if token else {}
    query = urllib.parse.urlencode(values)
    full_url = "%s?%s" % (url, query)
    req = urllib.request.Request(full_url, headers=headers)
    m.message("GET %s" % full_url)
    try:
        resp = urllib.request.urlopen(req)
        return json.loads(resp.read())
    except urllib.error.URLError as e:
        raise FatalError(_(e.reason))
def start_cooking(self, recipes, use_binaries=False, upload_binaries=False,
                  build_missing=True):
    '''
    Cooks the provided recipe names

    use_binaries: try to install pre-built binaries from the fridge
        instead of building from source.
    upload_binaries: freeze (upload) each recipe after a successful
        build.
    build_missing: when a binary is missing or broken, fall back to
        building from source instead of failing.
    '''
    if isinstance(recipes, str):
        recipes = [recipes]
    m.message(
        _("Building the following recipes: %s") %
        ' '.join([x for x in recipes]))
    if use_binaries or upload_binaries:
        def _build(recipe, i, length):
            # Build from source and optionally upload the result.
            # Closes over 'fridge' defined below (called only after it
            # is bound).
            self.cook_recipe(recipe, i, length)
            if upload_binaries:
                try:
                    fridge.freeze_recipe(recipe, i, length)
                except RecipeNotFreezableError:
                    # Not every recipe can be frozen; skip silently.
                    pass
        if not self.store:
            self.store = PackagesStore(self.cookbook.get_config())
        fridge = Fridge(self.store, force=self.force, dry_run=self.dry_run)
        i = 1
        for recipe in recipes:
            if use_binaries:
                try:
                    fridge.unfreeze_recipe(recipe, i, len(recipes))
                except (RecipeNotFreezableError, BuildStepError) as e:
                    # Fall back to a source build when allowed, or when
                    # the recipe simply cannot be frozen.
                    if build_missing or isinstance(
                            e, RecipeNotFreezableError):
                        _build(recipe, i, len(recipes))
                    else:
                        raise e
            else:
                _build(recipe, i, len(recipes))
            i += 1
    else:
        i = 1
        for recipe in recipes:
            self.cook_recipe(recipe, i, len(recipes))
            i += 1
async def fetch(self, redownload=False):
    """Download the tarball (with mirror fallback) unless running
    offline, in which case only an existing local file is accepted.

    Raises FatalError in offline mode when no local file exists.
    """
    if self.offline:
        if not os.path.isfile(self.download_path):
            msg = 'Offline mode: tarball {!r} not found in local sources ({})'
            raise FatalError(msg.format(self.tarball_name, self.download_dir))
        self.verify()
        m.action(_('Found %s at %s') % (self.url, self.download_path))
        return
    if not os.path.exists(self.download_dir):
        os.makedirs(self.download_dir)
    # Enable certificate checking only on Linux for now
    # FIXME: Add more platforms here after testing
    cc = self.config.platform == Platform.LINUX
    await shell.download(self.url, self.download_path, check_cert=cc,
                         overwrite=redownload, logfile=get_logfile(self),
                         mirrors=self.config.extra_mirrors + DEFAULT_MIRRORS)
    self.verify()
def fetch(self):
    """Fetch the svn repo from the local cache when present, otherwise
    from the network (which is forbidden in offline mode)."""
    if os.path.exists(self.repo_dir):
        # Start from scratch on every fetch.
        shutil.rmtree(self.repo_dir)
    cached_dir = os.path.join(self.config.cached_sources, self.package_name)
    have_cache = os.path.isdir(os.path.join(cached_dir, ".svn"))
    if have_cache:
        m.action(_('Copying cached repo from %s to %s instead of %s') %
                 (cached_dir, self.repo_dir, self.url))
        shell.copy_dir(cached_dir, self.repo_dir)
        return
    os.makedirs(self.repo_dir)
    if self.offline:
        raise FatalError('Offline mode: no cached svn repos found for {} at {!r}'
                         ''.format(self.package_name,
                                   self.config.cached_sources))
    svn.checkout(self.url, self.repo_dir)
    svn.update(self.repo_dir, self.revision)
def _create_packages_dmg(self):
    '''
    Bundle all runtime packages into a single disk image
    (<name>-packages.dmg) in the output directory.
    '''
    runtime_paths = list(self.packages_paths[PackageType.RUNTIME].values())
    dmg_file = os.path.join(self.output_dir,
                            self._package_name('-packages.dmg'))
    m.action(_("Creating image %s ") % dmg_file)

    # create a temporary directory to store packages
    workdir = os.path.join(self.tmp, "hdidir")
    os.makedirs(workdir)
    try:
        for pkg_path in runtime_paths:
            shutil.copy(pkg_path, workdir)
        # Create Disk Image
        shell.call('hdiutil create %s -ov -srcfolder %s' %
                   (dmg_file, workdir))
    finally:
        # Always clean up the staging directory, even if hdiutil fails
        shutil.rmtree(workdir)
def upload_curl(source, url, user=None, password=None):
    '''
    Upload a local file to a remote URL using curl.

    @param source: path of the local file to upload
    @param url: destination URL
    @param user: optional user name for authentication
    @param password: optional password; only used together with user
    '''
    if not os.path.exists(source):
        raise FatalError(_("File %s does not exist.") % source)
    path = None
    cmd = "curl -T "
    cmd += "%s %s" % (source, url)
    if user:
        # NOTE(review): the password ends up in the process command line,
        # visible to other local users -- confirm this is acceptable here
        cmd += " --user %s" % user
        cmd += ":%s " % password if password else " "
    cmd += " --ftp-create-dirs "
    logging.info("Uploading %s to %s", source, url)
    call(cmd, path)
def __init__(self):
    '''
    Declare the command line arguments: the recipe to build, the module
    and local-source directories, and one boolean switch per build step.
    '''
    def step_flag(name, help_text):
        # All step switches share the same boolean argparse shape
        return ArgparseArgument(name, action='store_true', default=False,
                                help=help_text)

    args = [
        ArgparseArgument('recipe', nargs='*',
                         help=_('name of the recipe to build')),
        ArgparseArgument('--directory', default='.',
                         help=_('directory of the module to be built')),
        ArgparseArgument('--local-sources', default='.',
                         help=_('directory of the local source')),
        step_flag('--configure', _('with configure')),
        step_flag('--compile', _('with compile')),
        step_flag('--check', _('with check')),
        step_flag('--install', _('with install')),
        step_flag('--fetch', _('with fetch')),
        step_flag('--dry-run',
                  _('only print commands instead of running them ')),
    ]
    Command.__init__(self, args)
class CheckPackage(Command):
    '''Command that runs the 'check' step of every recipe a package
    depends on, collecting and reporting the failures at the end.'''
    doc = N_('Run checks on a given package')
    name = 'checkpackage'

    def __init__(self):
        Command.__init__(self, [
            ArgparseArgument('package', nargs=1,
                             help=_('name of the package to run checks on')),
        ])

    def run(self, config, args):
        '''
        Run the checks for every recipe the package depends on.

        @raise CommandError: if a dependency recipe is not built yet, or
            if any recipe's checks failed.
        '''
        cookbook = CookBook(config)
        failed = []
        p_name = args.package[0]

        store = PackagesStore(config)
        p = store.get_package(p_name)
        # Materialize into a list: in Python 3 map() returns a one-shot
        # iterator, and we need to walk the recipes twice below.
        ordered_recipes = [cookbook.get_recipe(x)
                           for x in p.recipes_dependencies()]

        for recipe in ordered_recipes:
            if cookbook.recipe_needs_build(recipe.name):
                raise CommandError(
                    _("Recipe %s is not built yet" % recipe.name))

        for recipe in ordered_recipes:
            # Recipes without a 'check' step are skipped
            stepfunc = getattr(recipe, 'check', None)
            if stepfunc is None:
                m.message('%s has no check step, skipped' % recipe.name)
                continue
            try:
                m.message('Running checks for recipe %s' % recipe.name)
                stepfunc()
            # 'except Exception, ex' was Python 2 syntax (a SyntaxError on
            # Python 3); keep the broad catch so one failing recipe does
            # not abort the remaining checks.
            except Exception as ex:
                failed.append(recipe.name)
                m.warning(_("%s checks failed: %s") % (recipe.name, ex))
        if failed:
            raise CommandError(
                _("Error running %s checks on:\n " +
                  "\n ".join(failed)) % p_name)
def extract(self):
    '''
    Extract the tarball into the build directory, turn the result into a
    git repository, and apply the recipe patches on top of it.
    '''
    m.action(_('Extracting tarball to %s') % self.build_dir)
    if os.path.exists(self.build_dir):
        shutil.rmtree(self.build_dir)
    super().extract(self.config.sources)
    if self.tarball_dirname is not None:
        extracted = os.path.join(self.config.sources, self.tarball_dirname)
        # Since we just extracted this, a Windows anti-virus might still
        # have a lock on files inside it.
        shell.windows_proof_rename(extracted, self.build_dir)
    git.init_directory(self.build_dir)
    for patch in self.patches:
        patch_path = (patch if os.path.isabs(patch)
                      else self.relative_path(patch))
        if self.strip == 1:
            git.apply_patch(patch_path, self.build_dir)
        else:
            shell.apply_patch(patch_path, self.build_dir, self.strip)
def create_parser(self):
    ''' Creates the arguments parser '''

    class VariantAction(argparse.Action):
        # Accumulates -v options; each value may itself be a
        # comma-separated list of variants.
        def __call__(self, parser, namespace, value, option_string=None):
            existing = getattr(namespace, self.dest) or []
            # Convert comma-separated string to list
            setattr(namespace, self.dest, existing + value.split(','))

    parser = argparse.ArgumentParser(description=_(description))
    parser.add_argument('-t', '--timestamps', action='store_true',
                        default=False,
                        help=_('Print timestamps with every message printed'))
    parser.add_argument('--list-variants', action='store_true',
                        default=False,
                        help=_('List available variants'))
    parser.add_argument('-v', '--variants', action=VariantAction,
                        default=None,
                        help=_('Variants to be used for the build'))
    parser.add_argument('-c', '--config', action='append', type=str,
                        default=None,
                        help=_('Configuration file used for the build'))
    parser.add_argument('-m', '--manifest', action='store', type=str,
                        default=None,
                        help=_('Manifest file used to fixate git revisions'))
    # Only the uninstalled entry point can update itself from a manifest
    if os.path.basename(sys.argv[0]) == 'cerbero-uninstalled':
        parser.add_argument(
            '--self-update', action='store', type=str, default=None,
            help=_('Update cerbero git repository from manifest and exit.'))
    self.parser = parser
def __init__(self, force=None, no_deps=None, deps_only=False):
    '''
    Build command arguments.

    @param force: pre-set value for the force behaviour; when None a
        --force flag is exposed on the command line instead
    @param no_deps: pre-set value for the no-deps behaviour; when None a
        --no-deps flag is exposed on the command line instead
    @param deps_only: whether only the dependencies should be built
    '''
    args = [
        ArgparseArgument('recipe', nargs='*',
                         help=_('name of the recipe to build')),
        ArgparseArgument('--missing-files', action='store_true',
                         default=False,
                         help=_('prints a list of files installed that are '
                                'listed in the recipe')),
        ArgparseArgument('--dry-run', action='store_true', default=False,
                         help=_('only print commands instead of running them ')),
        ArgparseArgument('--offline', action='store_true', default=False,
                         help=_('Use only the source cache, no network')),
        ArgparseArgument('--jobs', '-j', action='store', type=int,
                         default=0,
                         help=_('How many recipes to build concurrently. '
                                '0 = number of CPUs.')),
    ]
    # Only expose the flags whose behaviour was not fixed by the caller
    if force is None:
        args.append(ArgparseArgument(
            '--force', action='store_true', default=False,
            help=_('force the build of the recipe ingoring '
                   'its cached state')))
    if no_deps is None:
        args.append(ArgparseArgument(
            '--no-deps', action='store_true', default=False,
            help=_('do not build dependencies')))
    self.force = force
    self.no_deps = no_deps
    self.deps_only = deps_only
    Command.__init__(self, args)
def self_update(self):
    '''Update this instance of cerbero git repository'''
    # No-op unless --self-update was passed on the command line.
    if not self.args.self_update:
        return
    try:
        # The --self-update value is the manifest that pins the 'cerbero'
        # project to a concrete remote and revision.
        manifest = Manifest(self.args.self_update)
        manifest.parse()
        project = manifest.find_project('cerbero')
        # presumably sys.argv[0] lives inside the cerbero git checkout,
        # so its directory is usable as the git work dir -- TODO confirm
        git_dir = os.path.dirname(sys.argv[0])
        git.add_remote(git_dir, project.remote, project.fetch_uri)
        run_until_complete(git.fetch(git_dir))
        run_until_complete(git.checkout(git_dir, project.revision))
    except FatalError as ex:
        self.log_error(
            _("ERROR: Failed to proceed with self update %s") % ex)
    # NOTE(review): always exits with status 0 here, even when the update
    # failed above -- confirm callers do not rely on a non-zero exit code.
    sys.exit(0)