def __init__(self):
    """Derive download metadata from self.url.

    Fills in self.tarball_name, self.download_path and self.mirror_url,
    percent-encodes the URL path, and rejects plaintext download schemes.

    Raises FatalError if the URL scheme is http or ftp.
    """
    if not self.tarball_name:
        self.tarball_name = os.path.basename(self.url)
    self.download_path = os.path.join(self.download_dir, self.tarball_name)
    # Percent-encode spaces and other special characters in the URL's path
    scheme, netloc, path, query, fragment = urllib.parse.urlsplit(self.url)
    path = urllib.parse.quote(path)
    self.url = urllib.parse.urlunsplit((scheme, netloc, path, query, fragment))
    self.mirror_url = urllib.parse.urljoin(TARBALL_MIRROR, self.tarball_name)
    # Downloads must not use plaintext transports
    if urllib.parse.urlparse(self.url).scheme in ('http', 'ftp'):
        raise FatalError('Download URL {!r} must use HTTPS'.format(
            self.url))
def __new__(klass, config, package, store):
    """Factory: return the packager class registered for the target distro.

    Falls back to the distro's generic packager (version key None) when the
    exact distro version has no specific packager.

    Raises FatalError when no packager exists for the distro at all.
    """
    d = config.target_distro
    v = config.target_distro_version
    if d not in _packagers:
        # Interpolate AFTER translation: _() must receive the literal
        # msgid or the catalog lookup can never match.
        raise FatalError(_("No packager available for the distro %s") % d)
    if v not in _packagers[d]:
        # Be tolerant with the distro version
        m.warning(_("No specific packager available for the distro "
                    "version %s, using generic packager for distro %s") % (v, d))
        v = None
    return _packagers[d][v](config, package, store)
def get_vcvarsall(version=None):
    """Locate vcvarsall.bat for a Visual Studio installation.

    @param version: a specific VS version key to look for, or None to try
                    all known versions, newest first
    @return: (posix path to vcvarsall.bat, version key)
    @raises FatalError: if the requested version is unknown or no
                        installation is found
    """
    if version is not None:
        # Guard: an unknown version key would otherwise surface as a bare
        # KeyError from vcvarsalls[version] below instead of a FatalError.
        if version not in vcvarsalls:
            raise FatalError('Unknown Visual Studio version {!r}, known: {}'.format(
                version, ', '.join(vcvarsalls.keys())))
        versions = [version]
    else:
        versions = sorted(vcvarsalls.keys(), reverse=True)
    program_files = get_program_files_dir()
    for version in versions:
        for path in vcvarsalls[version]:
            path = program_files / path
            # Find the location of the Visual Studio installation
            if path.is_file():
                return path.as_posix(), version
    raise FatalError('Microsoft Visual Studio not found, please file a bug. '
                     'We looked for: ' + ', '.join(versions))
def __init__(self, config, load=True, offline=False):
    """Create the package store and its cookbook.

    @param load: when False, skip scanning the packages directory
                 (used by the tests)
    @raises FatalError: if the packages directory does not exist
    """
    self._config = config
    # Maps package_name -> package
    self._packages = {}
    self.cookbook = CookBook(config, load=load, offline=offline)
    if not load:
        # used in tests to skip loading a dir with packages definitions
        return
    if not os.path.exists(config.packages_dir):
        raise FatalError(_("Packages dir %s not found") % config.packages_dir)
    self._load_packages()
def _find_deps(self, recipe, state={}, ordered=[]): if state.get(recipe, 'clean') == 'processed': return if state.get(recipe, 'clean') == 'in-progress': raise FatalError(_("Dependency Cycle: {0}".format(recipe.name))) state[recipe] = 'in-progress' recipe_deps = recipe.list_deps() if not recipe.runtime_dep: recipe_deps = self._runtime_deps () + recipe_deps for recipe_name in recipe_deps: try: recipedep = self.get_recipe(recipe_name) except RecipeNotFoundError as e: raise FatalError(_("Recipe %s has a unknown dependency %s" % (recipe.name, recipe_name))) try: self._find_deps(recipedep, state, ordered) except FatalError: m.error('Error finding deps of "{0}"'.format(recipe.name)) raise state[recipe] = 'processed' ordered.append(recipe) return ordered
def call(cmd, cmd_dir='.', fail=True, verbose=False):
    '''
    Run a shell command synchronously, logging it to LOGFILE when set.

    @param cmd: the command to run
    @type cmd: str
    @param cmd_dir: directory where the command will be run
    @type cmd_dir: str
    @param fail: whether or not to raise an exception if the command fails
    @type fail: bool
    @param verbose: echo the command to the console when no logfile is set
    @type verbose: bool
    @return: 0 on success (also 0 on failure when fail is False)
    @raises FatalError: if the command fails and fail is True
    '''
    global CALL_ENV
    try:
        if LOGFILE is None:
            if verbose:
                m.message("Running command '%s'" % cmd)
        else:
            LOGFILE.write("Running command '%s'\n" % cmd)
            LOGFILE.flush()
        shell = True
        if PLATFORM == Platform.WINDOWS:
            # windows do not understand ./
            if cmd.startswith('./'):
                cmd = cmd[2:]
            # run all processes through sh.exe to get scripts working
            cmd = '%s "%s"' % ('sh -c', cmd)
            # fix paths with backslashes
            cmd = _fix_mingw_cmd(cmd)
            # Disable shell which uses cmd.exe
            shell = False
        # Output goes to the logfile when one is active, stdout otherwise
        stream = LOGFILE or sys.stdout
        if DRY_RUN:
            # write to stderr so it's filtered more easily
            m.error("cd %s && %s && cd %s" % (cmd_dir, cmd, os.getcwd()))
            ret = 0
        else:
            # CALL_ENV, when set, fully replaces the inherited environment
            if CALL_ENV is not None:
                env = CALL_ENV.copy()
            else:
                env = os.environ.copy()
            ret = subprocess.check_call(cmd, cwd=cmd_dir,
                                        stderr=subprocess.STDOUT,
                                        stdout=StdOut(stream),
                                        env=env, shell=shell)
    except subprocess.CalledProcessError:
        if fail:
            raise FatalError(_("Error running command: %s") % cmd)
        else:
            # Best-effort mode: report success even though the command failed
            ret = 0
    return ret
def gendef(self, dllpath, outputdir, libname):
    """Generate a .def file for a DLL using mingw's gendef tool.

    @return: the basename of the generated .def file
    @raises FatalError: when gendef produced no usable output
    """
    defname = libname + '.def'
    output = shell.check_output('gendef - %s' % dllpath, outputdir,
                                logfile=self.logfile)
    # gendef always exits with status 0, even on failure, so the only way
    # to detect an error is to look for the 'LIBRARY' directive in the
    # generated contents.
    if 'LIBRARY' not in output:
        raise FatalError('gendef failed on {!r}\n{}'.format(
            dllpath, output))
    with open(os.path.join(outputdir, defname), 'w') as def_file:
        def_file.write(output)
    return defname
def __new__(klass, config, package, store):
    """Factory: return the macOS/iOS packager matching the package's type."""
    if config.target_platform == Platform.IOS:
        if not isinstance(package, MetaPackage):
            # Bug fix: the message was previously passed as TWO arguments,
            # so FatalError carried a tuple instead of a single string.
            raise FatalError("iOS platform only support packages "
                             "for MetaPackage")
        return IOSPackage(config, package, store)
    if isinstance(package, AppExtensionPackage):
        return ApplicationExtensionPackage(config, package, store)
    elif isinstance(package, Package):
        return OSXPackage(config, package, store)
    elif isinstance(package, MetaPackage):
        return ProductPackage(config, package, store)
    elif isinstance(package, App):
        return ApplicationPackage(config, package, store)
    # NOTE(review): falls through to None for any other package type —
    # presumably callers never pass one; confirm before tightening.
def _cook_recipe(self, recipe, count, total):
    """Build one recipe, running each of its steps and recording progress.

    Skips the recipe entirely when it is already built (unless forced),
    skips individual steps already marked done, logs each step to its own
    logfile, and converts step failures into build-step error handling.
    """
    if not self.cookbook.recipe_needs_build(recipe.name) and \
            not self.force:
        m.build_step(count, total, recipe.name, _("already built"))
        return
    if self.missing_files:
        # create a temp file that will be used to find newer files
        tmp = tempfile.NamedTemporaryFile()
    recipe.force = self.force
    for desc, step in recipe.steps:
        m.build_step(count, total, recipe.name, step)
        # check if the current step needs to be done
        if self.cookbook.step_done(recipe.name, step) and not self.force:
            m.action(_("Step done"))
            continue
        try:
            # call step function
            stepfunc = getattr(recipe, step)
            if not stepfunc:
                raise FatalError(_('Step %s not found') % step)
            # one log file per (recipe, step) pair
            shell.set_logfile_output("%s/%s-%s.log" % (recipe.config.logs, recipe, step))
            stepfunc()
            # update status successfully
            self.cookbook.update_step_status(recipe.name, step)
            shell.close_logfile_output()
        except FatalError as e:
            shell.close_logfile_output(dump=True)
            exc_traceback = sys.exc_info()[2]
            trace = ''
            # Don't print trace if the FatalError is merely that the
            # subprocess exited with a non-zero status. The traceback
            # is just confusing and useless in that case.
            if not isinstance(e.__context__, CalledProcessError):
                tb = traceback.extract_tb(exc_traceback)[-1]
                if tb.filename.endswith('.recipe'):
                    # Print the recipe and line number of the exception
                    # if it starts in a recipe
                    trace += 'Exception at {}:{}\n'.format(tb.filename, tb.lineno)
                trace += e.args[0] + '\n'
            self._handle_build_step_error(recipe, step, trace, e.arch)
        except Exception:
            shell.close_logfile_output(dump=True)
            raise BuildStepError(recipe, step, traceback.format_exc())
    self.cookbook.update_build_status(recipe.name, recipe.built_version())
    if self.missing_files:
        self._print_missing_files(recipe, tmp)
        tmp.close()
def runargs(self, config, name, output_dir, prefix, libdir, py_prefix,
            cmd=None, env=None, prefix_env_name='GSTREAMER_ROOT'):
    """Write an executable shell script exporting the GStreamer environment.

    The generated script (output_dir/name) exports PATH, library, plugin
    and pkg-config variables rooted at ${prefix_env_name}, plus any extra
    entries from `env`, and finally runs `cmd` (self.DEFAULT_CMD when None).

    Py3 fixes applied: 'env={}' mutable default -> None sentinel,
    'cmd == None' -> 'is None', dict.iteritems() -> items(),
    'except IOError, ex' -> 'except IOError as ex'.

    @raises FatalError: when the script cannot be created
    """
    if env is None:
        env = {}
    if cmd is None:
        cmd = self.DEFAULT_CMD
    self._env = env
    prefix_env = '${%s}' % prefix_env_name
    # Rewrite the absolute libdir so the script keys off the env variable
    libdir = libdir.replace(prefix, prefix_env)
    self._putvar('PATH', '%s/bin${PATH:+:$PATH}:/usr/local/bin:/usr/bin:/bin' % prefix_env)
    self._putvar('LD_LIBRARY_PATH', '%s${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH}' % libdir)
    self._putvar('PKG_CONFIG_PATH', '%s/lib/pkgconfig:%s/share/pkgconfig'
                 '${PKG_CONFIG_PATH:+:$PKG_CONFIG_PATH}' % (prefix_env, prefix_env))
    self._putvar('XDG_DATA_DIRS', '%s/share${XDG_DATA_DIRS:+:$XDG_DATA_DIRS}:'
                 '/usr/local/share:/usr/share' % prefix_env)
    self._putvar('XDG_CONFIG_DIRS',
                 '%s/etc/xdg${XDG_CONFIG_DIRS:+:$XDG_CONFIG_DIRS}:/etc/xdg' % prefix_env)
    self._putvar('GST_REGISTRY', '${HOME}/.gstreamer-0.10/gstreamer-cerbero-registry', None)
    self._putvar('GST_REGISTRY_1_0',
                 '${HOME}/.cache/gstreamer-1.0/gstreamer-cerbero-registry', None)
    self._putvar('GST_PLUGIN_SCANNER', '%s/libexec/gstreamer-0.10/gst-plugin-scanner' % prefix_env)
    self._putvar('GST_PLUGIN_SCANNER_1_0', '%s/libexec/gstreamer-1.0/gst-plugin-scanner' % prefix_env)
    self._putvar('GST_PLUGIN_SYSTEM_PATH', '%s/lib/gstreamer-0.10' % prefix_env)
    self._putvar('GST_PLUGIN_SYSTEM_PATH_1_0', '%s/lib/gstreamer-1.0' % prefix_env)
    self._putvar('PYTHONPATH', '%s/%s/site-packages${PYTHONPATH:+:$PYTHONPATH}' % (prefix_env, py_prefix))
    self._putvar('CFLAGS', '-I%s/include ${CFLAGS}' % prefix_env, " ")
    self._putvar('CXXFLAGS', '-I%s/include ${CXXFLAGS}' % prefix_env, " ")
    self._putvar('CPPFLAGS', '-I%s/include ${CPPFLAGS}' % prefix_env, " ")
    self._putvar('LDFLAGS', '-L%s ${LDFLAGS}' % libdir, " ")
    self._putvar('GIO_EXTRA_MODULES', '%s/gio/modules' % libdir)
    self._putvar('GI_TYPELIB_PATH', '%s/girepository-1.0' % libdir)
    envstr = 'export %s="%s"\n' % (prefix_env_name, prefix)
    for e, v in env.items():
        envstr += 'export %s="%s"\n' % (e, v)
    try:
        filepath = os.path.join(output_dir, name)
        if not os.path.exists(os.path.dirname(filepath)):
            os.mkdir(os.path.dirname(filepath))
        with open(filepath, 'w+') as f:
            f.write(SCRIPT_TPL % (envstr, cmd))
        shell.call("chmod +x %s" % filepath)
    except IOError as ex:
        # Keep the msgid literal so gettext can translate it
        raise FatalError(_("Error creating script: %s") % ex)
def _add_merge_modules(self):
    """Populate the WiX main Feature with the installer's merge modules.

    Builds the top-level <Feature>, drops packages with no content,
    computes the transitively required packages, and then references each
    package either as a <ComponentGroupRef> (wix_use_fragment) or as a
    <Merge> element under the install directory.

    @raises FatalError: when every listed package turned out to be empty
    """
    self.main_feature = etree.SubElement(
        self.product, "Feature",
        Id=self._format_id(self.package.name),
        Title=self.package.title, Level='1', Display="expand",
        AllowAdvertise="no", ConfigurableDirectory="INSTALLDIR")
    # (package, required, selected) triples from the metapackage listing
    packages = [(self.store.get_package(x[0]), x[1], x[2])
                for x in self.package.packages]
    # Remove empty packages
    packages = [
        x for x in packages if x[0] in list(self.packages_deps.keys())
    ]
    if len(packages) == 0:
        raise FatalError("All packages are empty: %s" %
                         [x[0] for x in self.package.packages])
    # Fill the list of required packages, which are the ones installed by
    # a package that is always installed
    req = [x[0] for x in packages if x[1] is True]
    required_packages = req[:]
    for p in req:
        # pull in each required package's own dependencies, recursively
        required_packages.extend(self.store.get_package_deps(p, True))
    if not self.package.wix_use_fragment:
        for package, required, selected in packages:
            if package in self.packages_deps:
                self._add_merge_module(package, required, selected,
                                       required_packages)
    # Add a merge module ref for all the packages or use ComponentGroupRef
    # when using wix_use_fragment
    for package, path in self.packages_deps.items():
        if self.package.wix_use_fragment:
            etree.SubElement(self.main_feature, 'ComponentGroupRef',
                             Id=self._format_group_id(package.name))
        else:
            etree.SubElement(self.installdir, 'Merge',
                             Id=self._package_id(package.name),
                             Language='1033', SourceFile=path, DiskId='1')
def run(self, config, args):
    """Run the 'check' step of a recipe (and of its deps when recursive).

    Every recipe must already be built; checks may be plain callables or
    coroutines (run on the event loop).

    @raises FatalError: recipe not built, or a check failed
    """
    cookbook = CookBook(config)
    recipe_name = args.recipe[0]
    recursive = args.recursive
    recipe = cookbook.get_recipe(recipe_name)
    if recursive:
        ordered_recipes = cookbook.list_recipe_deps(recipe_name)
    else:
        ordered_recipes = [recipe]
    # Checks only make sense against fully-built recipes
    for recipe in ordered_recipes:
        if cookbook.recipe_needs_build(recipe.name):
            # Interpolate after translation so the msgid stays literal
            raise FatalError(_("Recipe %s is not built yet") % recipe.name)
    for recipe in ordered_recipes:
        # Bug fix: a bare 'except:' around getattr() also swallowed
        # KeyboardInterrupt/SystemExit; a getattr default is enough.
        stepfunc = getattr(recipe, 'check', None)
        if stepfunc is None:
            m.message('%s has no check step, skipped' % recipe.name)
            continue
        try:
            if asyncio.iscoroutinefunction(stepfunc):
                loop = asyncio.get_event_loop()
                loop.run_until_complete(stepfunc(recipe))
            else:
                stepfunc()
        except FatalError:
            # Re-raise as-is to preserve the original traceback
            raise
        except Exception as ex:
            raise FatalError(_("Error running %s checks: %s") % (recipe.name, ex))
def _find_data_dir(self): if self.uninstalled: self.data_dir = os.path.join(os.path.dirname(__file__), '..', 'data') self.data_dir = os.path.abspath(self.data_dir) return curdir = os.path.dirname(__file__) while not os.path.exists(os.path.join(curdir, 'share', 'cerbero', 'config')): curdir = os.path.abspath(os.path.join(curdir, '..')) if curdir == '/' or curdir[1:] == ':/': # We reached the root without finding the data dir, which # shouldn't happen raise FatalError("Data dir not found") self.data_dir = os.path.join(curdir, 'share', 'cerbero')
def _find_plugin_dll_files(self, f): # Plugin template is always libfoo%(mext)s if not f.startswith('lib'): raise AssertionError('Plugin files must start with "lib": {!r}'.format(f)) # Plugin DLLs are required to be libfoo.dll (mingw) or foo.dll (msvc) if (Path(self.config.prefix) / f).is_file(): # libfoo.dll return [f] if self.using_msvc(): fdir, fname = os.path.split(f) fmsvc = '{}/{}'.format(fdir, fname[3:]) if (Path(self.config.prefix) / fmsvc).is_file(): # foo.dll, foo.pdb return [fmsvc, fmsvc[:-3] + 'pdb'] raise FatalError('GStreamer plugin {!r} not found'.format(f))
def run(self, config, args):
    """Fetch a cached dependency tarball matching the current git commit.

    Only usable with cerbero-uninstalled; optionally updates the CI log
    when a job id is given.

    @raises FatalError: when running against an installed cerbero
    """
    if not config.uninstalled:
        raise FatalError(
            _("fetch-cache is only available with "
              "cerbero-uninstalled"))
    checkout_dir = os.path.dirname(sys.argv[0])
    commit_sha = git.get_hash(checkout_dir, args.commit)
    deps = self.get_deps(config, args)
    if not args.skip_fetch:
        # Download the matching cache entry, if any
        matched = self.find_dep(deps, commit_sha)
        if matched:
            run_until_complete(self.fetch_dep(config, matched, args.namespace))
    if args.job_id:
        self.update_log(config, args, deps, commit_sha)
def __init__(self, config, load=True):
    """Create the cookbook, optionally restoring its cache and recipes.

    @param load: when False, skip restoring the cache and scanning the
                 recipes directory (used by the tests)
    @raises FatalError: if the recipes directory does not exist
    """
    self.set_config(config)
    # recipe_name -> recipe
    self.recipes = {}
    # recipe -> error
    self._invalid_recipes = {}
    self._mtimes = {}
    if not load:
        return
    self._restore_cache()
    if not os.path.exists(config.recipes_dir):
        raise FatalError(_("Recipes dir %s not found") % config.recipes_dir)
    self.update()
def check_call(cmd, cmd_dir=None, shell=False, split=True, fail=False):
    """Run a command and return its stdout as bytes.

    @param cmd: command string (shlex-split unless split=False) or list
    @param cmd_dir: working directory for the command
    @param fail: raise when the command exits non-zero
    @return: the command's captured stdout (bytes)
    @raises FatalError: when the command cannot run, or fails and fail=True
    """
    if split and isinstance(cmd, str):
        cmd = shlex.split(cmd)
    try:
        # Bug fix: stderr was 'open(os.devnull)' — opened for READING and
        # never closed (fd leak, read-only fd handed out as stderr).
        # subprocess.DEVNULL discards stderr correctly with no cleanup.
        process = subprocess.Popen(cmd, cwd=cmd_dir,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.DEVNULL,
                                   shell=shell)
        output, unused_err = process.communicate()
        if process.poll() and fail:
            raise Exception()
    except Exception:
        raise FatalError(_("Error running command: %s") % cmd)
    return output
def download_curl(url, destination=None, recursive=False, check_cert=True,
                  user=None, password=None, overwrite=False):
    '''
    Downloads a file with cURL

    @param url: url to download
    @type: str
    @param destination: destination where the file will be saved
    @type destination: str
    @param user: the username to use when connecting
    @type user: str
    @param password: the password to use when connecting
    @type password: str
    @param overwrite: in case the file exists overwrite it
    @type overwrite: bool
    @raises FatalError: recursive requested, or the download failed
    '''
    path = None
    if recursive:
        raise FatalError(_("cURL doesn't support recursive downloads"))
    cmd = "curl -L "
    if user:
        cmd += "--user %s" % user
        if password:
            cmd += ":%s " % password
        else:
            cmd += " "
    if not check_cert:
        cmd += "-k "
    if destination is not None:
        cmd += "%s -o %s " % (url, destination)
    else:
        cmd += "-O %s " % url
    # Bug fix: os.path.exists(None) raises TypeError, so only do the
    # already-downloaded check when a destination was actually given.
    if destination is not None and not overwrite and os.path.exists(destination):
        logging.info("File %s already downloaded." % destination)
    else:
        logging.info("Downloading %s", url)
        try:
            call(cmd, path)
        except FatalError:
            # Py3 fix: 'except FatalError, e' was Python 2 syntax.
            # Don't leave a partial download behind, but only remove a
            # file we know the path of and that exists.
            if destination is not None and os.path.exists(destination):
                os.remove(destination)
            raise
def dump(self, name, output_dir='.'):
    """Build a manifest dict describing an SDK's packages in output_dir.

    The manifest records the SDK's version/platform/arch/commit, each
    recipe's version, and for every existing tarball its type and MD5.

    @raises FatalError: when an expected package tarball is missing
                        (build-tools is allowed to lack a devel package)
    """
    sdk = self.sdk(name)
    desc = {
        'name': name,
        'version': sdk.version,
        'platform': self.config.target_platform,
        'arch': self.config.target_arch,
        'recipes': {},
        'commit': self.commit()
    }
    # Record the exact version of every recipe that makes up the SDK
    for rname in self.recipes_of_sdk(name):
        recipe = self.recipe(rname)
        desc['recipes'][rname] = recipe.version
    # Used only to compute the expected tarball filenames
    tarball = DistTarball(self.config, sdk, self.store)
    files = []
    for ptype in [PackageType.DEVEL, PackageType.RUNTIME]:
        TNAME = {
            PackageType.DEVEL: 'devel',
            PackageType.RUNTIME: 'runtime'
        }
        filename = tarball._get_name(ptype)
        path = os.path.join(output_dir, filename)
        if os.path.exists(path):
            files.append(
                {filename: {
                    'type': TNAME[ptype],
                    'MD5Sum': MD5(path)
                }})
        else:
            if (name == 'build-tools') and (ptype == PackageType.DEVEL):
                continue  # build-tools has no devel package
            reason = "abstract %s, but no %s package at %s" % (
                name, TNAME[ptype], path)
            m.error(reason)
            raise FatalError(reason)
    desc['packages'] = files
    # build-tools installs into its own prefix
    if name == 'build-tools':
        desc['prefix'] = self.config.build_tools_prefix
    else:
        desc['prefix'] = self.config.prefix
    return desc
async def async_call(cmd, cmd_dir='.', fail=True, logfile=None, cpu_bound=True, env=None):
    '''
    Run a shell command asynchronously, bounded by a concurrency semaphore.

    @param cmd: the command to run
    @type cmd: str
    @param cmd_dir: directory where the command will be run
    @type cmd_dir: str
    @param fail: raise FatalError when the command exits non-zero
    @type fail: bool
    @param logfile: file object to log the command and its output to;
                    when None, output goes to the parent's stdout
    @param cpu_bound: pick the CPU-bound semaphore instead of the I/O one
    @type cpu_bound: bool
    @param env: environment for the subprocess; inherits os.environ if None
    @return: the process return code (None in dry-run mode)
    @raises FatalError: non-zero exit status and fail is True
    '''
    global CPU_BOUND_SEMAPHORE, NON_CPU_BOUND_SEMAPHORE
    # Two pools so CPU-heavy jobs don't starve pure-I/O jobs (and vice versa)
    semaphore = CPU_BOUND_SEMAPHORE if cpu_bound else NON_CPU_BOUND_SEMAPHORE
    async with semaphore:
        cmd = _cmd_string_to_array(cmd, env)
        if logfile is None:
            stream = None
        else:
            logfile.write("Running command '%s'\n" % ' '.join([shlex.quote(c) for c in cmd]))
            logfile.flush()
            stream = logfile
        if DRY_RUN:
            # write to stderr so it's filtered more easily
            m.error("cd %s && %s && cd %s" % (cmd_dir, cmd, os.getcwd()))
            return
        env = os.environ.copy() if env is None else env.copy()
        # Force python scripts to print their output on newlines instead
        # of on exit. Ensures that we get continuous output in log files.
        env['PYTHONUNBUFFERED'] = '1'
        proc = await asyncio.create_subprocess_exec(*cmd, cwd=cmd_dir,
                                                    stderr=subprocess.STDOUT,
                                                    stdout=stream, env=env)
        await proc.wait()
        if proc.returncode != 0 and fail:
            raise FatalError('Running {!r}, returncode {}'.format(
                cmd, proc.returncode))
        return proc.returncode
def request(self, url, values, token=None):
    """GET `url` with `values` urlencoded into the query string.

    @param token: optional Private-Token header value for authentication
    @return: the parsed JSON response
    @raises FatalError: on any URL error
    """
    headers = {}
    if token:
        headers = {"Private-Token": token}
    data = urllib.parse.urlencode(values)
    url = "%s?%s" % (url, data)
    req = urllib.request.Request(url, headers=headers)
    m.message("GET %s" % url)
    try:
        # Close the response deterministically instead of leaking it
        with urllib.request.urlopen(req) as resp:
            return json.loads(resp.read())
    except urllib.error.URLError as e:
        # Bug fix: _() must wrap a literal msgid; translating the runtime
        # value e.reason was a gettext misuse and a no-op at best.
        raise FatalError(str(e.reason))
def _format_version(self, version): # mayor and minor must be less than 256 on windows, # so 2012.5 must be changed to 20.12.5 versions = version.split('.') tversions = [] for version in versions: i = int(version) if i > 9999: raise FatalError("Unsupported version number, mayor and minor " "must be less than 9999") elif i > 255: tversions.append(version[:-2]) tversions.append(version[-2:]) else: tversions.append(version) return '.'.join(tversions)
def run(self, config, args):
    """Package every requested name from the loaded release profiles.

    @raises FatalError: when a name has no profile entry and deps_only
                        was not requested
    """
    self.config = config
    self.args = args
    self.profile = {}
    self._load_release()
    for name in args.name:
        unknown = self.profile.get(name) is None and not self.args.deps_only
        if unknown:
            msg = "can not find package of %s from profiles." % name
            m.error(msg)
            raise FatalError(msg)
        # build-tools gets its own dedicated packaging path
        if name == 'build-tools':
            self._build_tools()
        else:
            self._package(name)
async def fetch(self, redownload=False):
    """Download the tarball (or verify an already-present local copy).

    In offline mode the tarball must already exist locally; otherwise it
    is downloaded (honoring configured mirrors) and verified.

    @raises FatalError: offline and the tarball is missing locally
    """
    if self.offline:
        if not os.path.isfile(self.download_path):
            msg = 'Offline mode: tarball {!r} not found in local sources ({})'
            raise FatalError(msg.format(self.tarball_name, self.download_dir))
        self.verify()
        m.action(_('Found %s at %s') % (self.url, self.download_path))
        return
    if not os.path.exists(self.download_dir):
        os.makedirs(self.download_dir)
    # Enable certificate checking only on Linux for now
    # FIXME: Add more platforms here after testing
    check_cert = self.config.platform == Platform.LINUX
    mirrors = self.config.extra_mirrors + DEFAULT_MIRRORS
    await shell.download(self.url, self.download_path,
                         check_cert=check_cert,
                         overwrite=redownload,
                         logfile=get_logfile(self),
                         mirrors=mirrors)
    self.verify()
def _resolve_cmd(cmd, env):
    '''
    On Windows, we can't pass the PATH variable through the env= kwarg to
    subprocess.* and expect it to use that value to search for the command,
    because Python uses CreateProcess directly. Unlike execvpe, CreateProcess
    does not use the PATH env var in the env supplied to search for the
    executable. Hence, we need to search for it manually.
    '''
    # Nothing to do unless we're on Windows with an explicit PATH override
    if PLATFORM != Platform.WINDOWS or env is None or 'PATH' not in env:
        return cmd
    # Absolute commands need no PATH lookup
    if os.path.isabs(cmd[0]):
        return cmd
    resolved = shutil.which(cmd[0], path=env['PATH'])
    if not resolved:
        raise FatalError('Could not find {!r} in PATH {!r}'.format(
            cmd[0], env['PATH']))
    cmd[0] = resolved
    return cmd
def upload_curl(source, url, user=None, password=None):
    """Upload a local file to a remote URL using curl -T.

    @raises FatalError: when the source file does not exist
    """
    if not os.path.exists(source):
        raise FatalError(_("File %s does not exist.") % source)
    path = None
    # Assemble the exact same command line the old += chain produced
    parts = ["curl -T ", "%s %s" % (source, url)]
    if user:
        parts.append(" --user %s" % user)
        parts.append(":%s " % password if password else " ")
    parts.append(" --ftp-create-dirs ")
    cmd = ''.join(parts)
    logging.info("Uploading %s to %s", source, url)
    call(cmd, path)
def fetch(self):
    """Check out the svn sources, preferring a locally cached repository.

    Any existing working copy is wiped first; in offline mode the cached
    copy is the only option.

    @raises FatalError: offline and no cached svn repository is available
    """
    if os.path.exists(self.repo_dir):
        shutil.rmtree(self.repo_dir)
    cached_dir = os.path.join(self.config.cached_sources, self.package_name)
    if os.path.isdir(os.path.join(cached_dir, ".svn")):
        # A cached checkout exists: copy it instead of hitting the network
        m.action(_('Copying cached repo from %s to %s instead of %s') %
                 (cached_dir, self.repo_dir, self.url))
        shell.copy_dir(cached_dir, self.repo_dir)
        return
    os.makedirs(self.repo_dir)
    if self.offline:
        raise FatalError('Offline mode: no cached svn repos found for {} at {!r}'
                         ''.format(self.package_name, self.config.cached_sources))
    svn.checkout(self.url, self.repo_dir)
    svn.update(self.repo_dir, self.revision)
async def fetch(self, checkout=True):
    """Fetch the recipe's git sources, preferring a locally cached clone.

    Initializes the repo on first use, adds either the cache or the
    configured remotes, fetches, and (optionally) checks out the pinned
    commit plus submodules.
    """
    # First try to get the sources from the cached dir if there is one
    cached_dir = os.path.join(self.config.cached_sources, self.name)
    if not os.path.exists(self.repo_dir):
        # NOTE(review): cached_dir is a freshly joined, non-empty path
        # string here, so 'not cached_dir' is always False and this
        # offline guard can never fire. It probably meant to test for the
        # cached checkout (os.path.isdir(<cached_dir>/.git), as below) —
        # confirm intent before changing.
        if not cached_dir and self.offline:
            msg = 'Offline mode: git repo for {!r} not found in cached sources ({}) or local sources ({})'
            raise FatalError(
                msg.format(self.name, self.config.cached_sources, self.repo_dir))
        git.init(self.repo_dir, logfile=get_logfile(self))
    if os.path.isdir(os.path.join(cached_dir, ".git")):
        # Use the local cache as a file:// remote for every configured remote
        for remote, url in self.remotes.items():
            git.add_remote(self.repo_dir, remote, "file://" + cached_dir,
                           logfile=get_logfile(self))
        await git.fetch(self.repo_dir, fail=False, logfile=get_logfile(self))
    else:
        cached_dir = None
        # add remotes from both upstream and config so user can easily
        # cherry-pick patches between branches
        for remote, url in self.remotes.items():
            git.add_remote(self.repo_dir, remote, url,
                           logfile=get_logfile(self))
        # fetch remote branches
        if not self.offline:
            await git.fetch(self.repo_dir, fail=False,
                            logfile=get_logfile(self))
    if checkout:
        # Config may pin a specific commit, overriding the recipe's default
        commit = self.config.recipe_commit(self.name) or self.commit
        await git.checkout(self.repo_dir, commit, logfile=get_logfile(self))
        await git.submodules_update(self.repo_dir, cached_dir, fail=False,
                                    offline=self.offline,
                                    logfile=get_logfile(self))
def __new__(klass, config, package, store):
    """Factory: return the packager for the target distro.

    Falls back to the distro's generic packager when the exact version has
    no specific one, and to the generic (Distro.NONE) packager when
    cross-compiling for Windows from Linux.

    @raises FatalError: when no packager exists for the distro at all
    """
    d = config.target_distro
    v = config.target_distro_version
    if d not in _packagers:
        # Interpolate AFTER translation: _() must receive the literal
        # msgid or the catalog lookup can never match.
        raise FatalError(_("No packager available for the distro %s") % d)
    if v not in _packagers[d]:
        # Be tolerant with the distro version
        m.warning(_("No specific packager available for the distro "
                    "version %s, using generic packager for distro %s") % (v, d))
        v = None
    if (d == Distro.WINDOWS and config.platform == Platform.LINUX):
        m.warning("Cross-compiling for Windows, overriding Packager")
        d = Distro.NONE
    return _packagers[d][v](config, package, store)
def windows_arch():
    """
    Detecting the 'native' architecture of Windows is not a trivial task. We
    cannot trust that the architecture that Python is built for is the 'native'
    one because you can run 32-bit apps on 64-bit Windows using WOW64 and
    people sometimes install 32-bit Python on 64-bit Windows.
    """
    # These env variables are always available. See:
    # https://msdn.microsoft.com/en-us/library/aa384274(VS.85).aspx
    # https://blogs.msdn.microsoft.com/david.wang/2006/03/27/howto-detect-process-bitness/
    wow64_arch = os.environ.get('PROCESSOR_ARCHITEW6432', '').lower()
    if wow64_arch:
        return wow64_arch
    try:
        return os.environ['PROCESSOR_ARCHITECTURE'].lower()
    except KeyError:
        # If this doesn't exist, something is messing with the environment
        raise FatalError(_('Unable to detect Windows architecture'))