async def unpack(filepath, output_dir, logfile=None):
    '''
    Extracts a tarball or zip archive into a directory

    @param filepath: path of the tarball
    @type filepath: str
    @param output_dir: output directory
    @type output_dir: str
    @param logfile: path to the file to log instead of stdout
    @type logfile: str
    '''
    m.log('Unpacking {} in {}'.format(filepath, output_dir), logfile)
    # Recent versions of tar are much faster than the tarfile module, but we
    # can't use tar on Windows because MSYS tar is ancient and buggy.
    if filepath.endswith(TARBALL_SUFFIXES):
        if PLATFORM != Platform.WINDOWS:
            if not os.path.exists(output_dir):
                os.makedirs(output_dir)
            await async_call(['tar', '-C', output_dir, '-xf', filepath])
        else:
            # Derive the compression mode from the suffix: 'bz2' needs the
            # full name, other suffixes ('gz', 'xz') are the last two chars.
            cmode = 'bz2' if filepath.endswith('bz2') else filepath[-2:]
            # Use a context manager so the archive is closed even if
            # extraction fails (the original leaked the file handle).
            with tarfile.open(filepath, mode='r:' + cmode) as tf:
                tf.extractall(path=output_dir)
    elif filepath.endswith('.zip'):
        # Same here: ensure the zip file handle is always released.
        with zipfile.ZipFile(filepath, "r") as zf:
            zf.extractall(path=output_dir)
    else:
        raise FatalError("Unknown tarball format %s" % filepath)
def apply_patch(patch, directory, strip=1, logfile=None):
    '''
    Apply a patch

    @param patch: path of the patch file
    @type patch: str
    @param directory: directory to apply the patch on
    @type directory: str
    @param strip: number of leading path components to strip (patch -p)
    @type strip: int
    @param logfile: path to the file to log instead of stdout
    @type logfile: str
    '''
    m.log("Applying patch {}".format(patch), logfile)
    # NOTE(review): the patch path is interpolated unquoted into the shell
    # command, so paths containing spaces would break — confirm callers
    # never pass such paths, or add quoting in the `call` helper.
    call('%s -p%s -f -i %s' % (PATCH, strip, patch), directory)
async def download(url, destination=None, check_cert=True, overwrite=False,
                   logfile=None, mirrors=None):
    '''
    Downloads a file

    @param url: url to download
    @type url: str
    @param destination: destination where the file will be saved
    @type destination: str
    @param check_cert: whether to check certificates or not
    @type check_cert: bool
    @param overwrite: whether to overwrite the destination or not
    @type overwrite: bool
    @param logfile: path to the file to log instead of stdout
    @type logfile: str
    @param mirrors: list of mirrors to use as fallback
    @type mirrors: list
    '''
    if not overwrite and os.path.exists(destination):
        if logfile is None:
            logging.info("File %s already downloaded." % destination)
        return
    else:
        # Guard against a bare filename (dirname == ''), which would make
        # os.makedirs raise; exist_ok avoids a create/exists race as well.
        dirname = os.path.dirname(destination)
        if dirname:
            os.makedirs(dirname, exist_ok=True)
        m.log("Downloading {}".format(url), logfile)

    urls = [url]
    if mirrors is not None:
        filename = os.path.basename(url)
        # Add a trailing '/' to the url so that urljoin joins correctly urls
        # in case users provided it without the trailing '/'
        urls += [urllib.parse.urljoin(u + '/', filename) for u in mirrors]

    # wget shipped with msys fails with an SSL error on github URLs
    # https://githubengineering.com/crypto-removal-notice/
    # curl on Windows (if provided externally) is often badly-configured and
    # fails to download over https, so just always use urllib2 on Windows.
    if sys.platform.startswith('win'):
        download_func = download_urllib2
    elif which('wget'):
        download_func = download_wget
    elif which('curl'):
        download_func = download_curl
    else:
        # Fallback. TODO: make this the default and remove curl/wget dependency
        download_func = download_urllib2

    # Try the primary url first, then each mirror in order; collect every
    # failure so the final error message shows all attempts.
    errors = []
    for murl in urls:
        try:
            return await download_func(murl, destination, check_cert,
                                       overwrite, logfile)
        except Exception as ex:
            errors.append((murl, ex))
    if len(errors) == 1:
        errors = errors[0]
    raise FatalError('Failed to download {!r}: {!r}'.format(url, errors))
async def configure(self):
    '''
    Builds up the autotools configure command line (self.configure_tpl)
    for this recipe — PIC, maintainer mode, gtk-doc, introspection,
    host/build/target triplets, configure cache — refreshes libtool helper
    scripts in the source tree, and finally delegates the actual configure
    run to MakefilesBase.configure.
    '''
    # Build with PIC for static linking
    self.configure_tpl += ' --with-pic '
    # Only use --disable-maintainer mode for real autotools based projects
    if os.path.exists(os.path.join(self.config_src_dir, 'configure.in')) or\
            os.path.exists(os.path.join(self.config_src_dir, 'configure.ac')):
        self.configure_tpl += " --disable-maintainer-mode "
        self.configure_tpl += " --disable-silent-rules "
    # Never build gtk-doc documentation
    self.configure_tpl += " --disable-gtk-doc "

    # Enable introspection only when the 'gi' variant is active and the
    # recipe itself does not opt out.
    if self.config.variants.gi and not self.disable_introspection \
            and self.use_gobject_introspection():
        self.configure_tpl += " --enable-introspection "
    else:
        self.configure_tpl += " --disable-introspection "

    # Regenerate the build system first when the recipe asks for it.
    if self.autoreconf:
        await shell.async_call(self.autoreconf_sh, self.config_src_dir,
                               logfile=self.logfile, env=self.env)

    # We don't build libtool on Windows
    if self.config.platform == Platform.WINDOWS:
        self.override_libtool = False

    # Use our own config.guess and config.sub
    config_datadir = os.path.join(self.config._relative_path('data'),
                                  'autotools')
    cfs = {'config.guess': config_datadir, 'config.sub': config_datadir}
    # ensure our libtool modifications are actually picked up by recipes
    if self.name != 'libtool' and self.override_libtool:
        cfs['ltmain.sh'] = os.path.join(self.config.build_tools_prefix,
                                        'share/libtool/build-aux')
    # For each helper script, find every copy in the source tree and
    # overwrite it with our own version.
    for cf, srcdir in cfs.items():
        find_cmd = 'find {} -type f -name {}'.format(
            self.config_src_dir, cf)
        files = await shell.async_call_output(find_cmd, logfile=self.logfile,
                                              env=self.env)
        files = files.split('\n')
        # drop the empty entry left by the trailing newline of `find` output
        files.remove('')
        for f in files:
            o = os.path.join(srcdir, cf)
            m.log("CERBERO: copying %s to %s" % (o, f), self.logfile)
            shutil.copy(o, f)

    if self.config.platform == Platform.WINDOWS and \
            self.supports_cache_variables:
        # On windows, environment variables are uppercase, but we still
        # need to pass things like am_cv_python_platform in lowercase for
        # configure and autogen.sh
        for k, v in self.env.items():
            # matches names like am_cv_*, ac_cv_* (prefix of 2 chars + '_cv_')
            if k[2:6] == '_cv_':
                # NOTE(review): this string literal was line-wrapped in the
                # source dump; reconstructed as ' %s="%s"' — confirm against
                # the repository.
                self.configure_tpl += ' %s="%s"' % (k, v)

    if self.add_host_build_target:
        if self.config.host is not None:
            self.configure_tpl += ' --host=%(host)s'
        if self.config.build is not None:
            self.configure_tpl += ' --build=%(build)s'
        if self.config.target is not None:
            self.configure_tpl += ' --target=%(target)s'

    # The configure cache is unsafe when system libs or a changed
    # environment could invalidate cached probe results.
    use_configure_cache = self.config.use_configure_cache
    if self.use_system_libs and self.config.allow_system_libs:
        use_configure_cache = False

    if self._new_env:
        use_configure_cache = False

    if use_configure_cache and self.can_use_configure_cache:
        cache = os.path.join(self.config.sources, '.configure.cache')
        self.configure_tpl += ' --cache-file=%s' % cache

    # Add at the very end to allow recipes to override defaults
    self.configure_tpl += " %(options)s "

    await MakefilesBase.configure(self)