def configure(self):
    if self.supports_non_src_build:
        self.config_sh = os.path.join(self.repo_dir, self.config_sh)

    # Only use --disable-maintainer mode for real autotools based projects
    if os.path.exists(os.path.join(self.make_dir, 'configure.in')) or \
            os.path.exists(os.path.join(self.make_dir, 'configure.ac')):
        self.configure_tpl += " --disable-maintainer-mode "
        self.configure_tpl += " --disable-silent-rules "

    if self.autoreconf:
        shell.call(self.autoreconf_sh, self.make_dir)

    files = shell.check_call('find %s -type f -name config.guess' %
                             self.make_dir).split('\n')
    files.remove('')
    for f in files:
        o = os.path.join(self.config._relative_path('data'), 'autotools',
                         'config.guess')
        m.action("copying %s to %s" % (o, f))
        shutil.copy(o, f)

    files = shell.check_call('find %s -type f -name config.sub' %
                             self.make_dir).split('\n')
    files.remove('')
    for f in files:
        o = os.path.join(self.config._relative_path('data'), 'autotools',
                         'config.sub')
        m.action("copying %s to %s" % (o, f))
        shutil.copy(o, f)

    if self.config.platform == Platform.WINDOWS and \
            self.supports_cache_variables:
        # On Windows, environment variables are uppercase, but we still
        # need to pass things like am_cv_python_platform in lowercase for
        # configure and autogen.sh
        for k, v in os.environ.iteritems():
            if k[2:6] == '_cv_':
                self.configure_tpl += ' %s="%s"' % (k, v)

    if self.add_host_build_target:
        if self.config.host is not None:
            self.configure_tpl += ' --host=%(host)s'
        if self.config.build is not None:
            self.configure_tpl += ' --build=%(build)s'
        if self.config.target is not None:
            self.configure_tpl += ' --target=%(target)s'

    use_configure_cache = self.config.use_configure_cache
    if self.use_system_libs and self.config.allow_system_libs:
        use_configure_cache = False

    if self.new_env or self.append_env:
        use_configure_cache = False

    if use_configure_cache and self.can_use_configure_cache:
        cache = os.path.join(self.config.sources, '.configure.cache')
        self.config_sh += ' --cache-file=%s' % cache

    MakefilesBase.configure(self)
def install_python_sdk(self):
    m.action(_("Installing Python headers"))
    tmp_dir = tempfile.mkdtemp()
    shell.call("git clone %s" % os.path.join(self.config.git_root,
                                             'windows-external-sdk.git'),
               tmp_dir)
    python_headers = os.path.join(self.prefix, 'include', 'Python2.7')
    python_headers = to_unixpath(os.path.abspath(python_headers))
    shell.call('mkdir -p %s' % python_headers)
    python_libs = os.path.join(self.prefix, 'lib')
    python_libs = to_unixpath(python_libs)
    temp = to_unixpath(os.path.abspath(tmp_dir))
    shell.call('cp -f %s/windows-external-sdk/python27/%s/include/* %s' %
               (temp, self.version, python_headers))
    shell.call('cp -f %s/windows-external-sdk/python27/%s/lib/* %s' %
               (temp, self.version, python_libs))
    pydll = '%s/lib/python.dll' % self.prefix
    try:
        os.remove(pydll)
    except OSError:
        pass
    shell.call('ln -s python27.dll %s' % (pydll))
    shutil.rmtree(tmp_dir)
def create_package(self, root, pkg_id, version, title, output_file,
                   destination='/opt/', scripts_path=None):
    '''
    Creates an osx flat package, where all files are properly bundled in a
    directory that is set as the package root

    @param root: root path
    @type root: str
    @param pkg_id: package identifier
    @type pkg_id: str
    @param version: package version
    @type version: str
    @param title: package title
    @type title: str
    @param output_file: path of the output file
    @type output_file: str
    @param destination: installation path
    @type destination: str
    @param scripts_path: relative path for package scripts
    @type scripts_path: str
    '''
    args = {'root': root, 'identifier': pkg_id, 'version': version,
            'install-location': destination}
    if scripts_path is not None:
        args['scripts'] = scripts_path
    #plist = tempfile.NamedTemporaryFile()
    #cpl = ComponentPropertyPlist(title, os.path.basename(output_file))
    #cpl.save(plist.name)
    #args['component-plist'] = plist.name
    shell.call(self._cmd_with_args(args, output_file))
def extract(self):
    if os.path.exists(self.build_dir):
        # fix read-only permissions
        if self.config.platform == Platform.WINDOWS:
            shell.call('chmod -R +w .git/', self.build_dir, fail=False)
        try:
            commit_hash = git.get_hash(self.repo_dir, self.commit)
            checkout_hash = git.get_hash(self.build_dir, 'HEAD')
            if commit_hash == checkout_hash and not self.patches:
                return False
        except Exception:
            pass
        shutil.rmtree(self.build_dir)
    if not os.path.exists(self.build_dir):
        os.mkdir(self.build_dir)

    # checkout the current version
    git.local_checkout(self.build_dir, self.repo_dir, self.commit)

    for patch in self.patches:
        if not os.path.isabs(patch):
            patch = self.relative_path(patch)
        if self.strip == 1:
            git.apply_patch(patch, self.build_dir)
        else:
            shell.apply_patch(patch, self.build_dir, self.strip)
    return True
def _install_dotnet_for_wine(self):
    self._download_missing_wine_deps()
    # wineconsole fails trying to get env var in a VT with DISPLAY.
    # This is working on docker buildbot and on a real terminal.
    if 'DISPLAY' not in os.environ:
        os.environ['WINE'] = "wineconsole"
    shell.call('%s -q dotnet40 corefonts' % self.winetricks_tool)
def _install_bundle_specific_files(self):
    # Installing desktop file and runner script
    shell.call("cp %s %s" % (self.desktop_file, self.tmp_install_dir),
               fail=False)
    filepath = os.path.join(self.tmp_install_dir, "AppRun")

    # Base environment variables
    env = {}
    env['GSETTINGS_SCHEMA_DIR'] = \
        '${APPDIR}/share/glib-2.0/schemas/:${GSETTINGS_SCHEMA_DIR}'
    env['GDK_PIXBUF_MODULE_FILE'] = \
        './lib/gdk-pixbuf-2.0/2.10.0/loaders.cache'
    env['GST_REGISTRY'] = \
        '${HOME}/.cache/gstreamer-1.0/%s-bundle-registry' % self.package.name
    env['GST_REGISTRY_1_0'] = \
        '${HOME}/.cache/gstreamer-1.0/%s-bundle-registry' % self.package.name
    if hasattr(self.package, "default_gtk_theme"):
        env['GTK_THEME'] = self.package.default_gtk_theme

    launch_command = LAUNCH_BUNDLE_COMMAND % ({
        "prefix": self.tmp_install_dir,
        "executable_path": self.package.commands[0][1],
        "appname": self.package.name})

    shellvarsgen = gensdkshell.GenSdkShell()
    shellvarsgen.runargs(self.config, "AppRun", self.tmp_install_dir,
                         "${APPDIR}", "${APPDIR}/lib", self.config.py_prefix,
                         cmd=launch_command, env=env,
                         prefix_env_name="APPDIR")
def _install_perl_deps(self):
    # Install cpan-minus, a zero-conf CPAN wrapper
    cpanm_installer = tempfile.NamedTemporaryFile().name
    shell.download(self.CPANM_URL, cpanm_installer)
    shell.call('chmod +x %s' % cpanm_installer)
    # Install XML::Parser, required for intltool
    shell.call("sudo %s XML::Parser" % cpanm_installer)
def install_python_sdk(self):
    ### FIXME : MOVE OVER REPOSITORY TO STANDARD ROOT
    old_sdk_git_root = 'git://anongit.freedesktop.org/gstreamer-sdk'
    m.action(_("Installing Python headers"))
    tmp_dir = tempfile.mkdtemp()
    shell.call("git clone %s" % os.path.join(old_sdk_git_root,
                                             'windows-external-sdk.git'),
               tmp_dir)
    python_headers = os.path.join(self.prefix, 'include', 'Python2.7')
    python_headers = to_unixpath(os.path.abspath(python_headers))
    shell.call('mkdir -p %s' % python_headers)
    python_libs = os.path.join(self.prefix, 'lib')
    python_libs = to_unixpath(python_libs)
    temp = to_unixpath(os.path.abspath(tmp_dir))
    shell.call('cp -f %s/windows-external-sdk/python27/%s/include/* %s' %
               (temp, self.version, python_headers))
    shell.call('cp -f %s/windows-external-sdk/python27/%s/lib/* %s' %
               (temp, self.version, python_libs))
    try:
        os.remove('%s/lib/python.dll' % self.prefix)
    except OSError:
        pass
    shell.call('ln -s python27.dll python.dll', '%s/lib' % self.prefix)
    shutil.rmtree(tmp_dir)
def local_checkout(git_dir, local_git_dir, commit):
    '''
    Clone a repository for a given commit in a different location

    @param git_dir: destination path of the git repository
    @type git_dir: str
    @param local_git_dir: path of the source git repository
    @type local_git_dir: str
    @param commit: the commit to checkout
    @type commit: str
    '''
    # reset to a commit in case it's the first checkout and the master branch
    # is missing
    branch_name = 'cerbero_build'
    shell.call('%s reset --hard %s' % (GIT, commit), local_git_dir,
               env=CLEAN_ENV)
    shell.call('%s branch %s' % (GIT, branch_name), local_git_dir, fail=False,
               env=CLEAN_ENV)
    shell.call('%s checkout %s' % (GIT, branch_name), local_git_dir,
               env=CLEAN_ENV)
    shell.call('%s reset --hard %s' % (GIT, commit), local_git_dir,
               env=CLEAN_ENV)
    shell.call('%s clone %s -s -b %s .' % (GIT, local_git_dir, branch_name),
               git_dir, env=CLEAN_ENV)
    submodules_update(git_dir, local_git_dir)
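# A minimal usage sketch, not part of the original source: check out a pinned
# commit from a local repository into a separate build directory. The paths
# below are hypothetical examples; GIT and CLEAN_ENV are assumed to be defined
# as used by the function above.
def _example_local_checkout():
    local_checkout('/tmp/glib-build',                      # hypothetical destination
                   '/var/cache/cerbero/sources/glib',      # hypothetical local repo
                   'HEAD')                                 # any commit-ish works here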
def create_package(self, root, pkg_id, version, title, output_file,
                   destination='/opt/', scripts_path=None):
    '''
    Creates an osx flat package, where all files are properly bundled in a
    directory that is set as the package root

    @param root: root path
    @type root: str
    @param pkg_id: package identifier
    @type pkg_id: str
    @param version: package version
    @type version: str
    @param title: package title
    @type title: str
    @param output_file: path of the output file
    @type output_file: str
    @param destination: installation path
    @type destination: str
    @param scripts_path: relative path for package scripts
    @type scripts_path: str
    '''
    args = {'root': root, 'identifier': pkg_id, 'version': version,
            'install-location': destination}
    if scripts_path is not None:
        args['scripts'] = scripts_path
    shell.call(self._cmd_with_args(args, output_file))
def change_lib_path(self, object_file, old_path, new_path):
    for lib in self.list_shared_libraries(object_file):
        if old_path in lib:
            # Don't reassign new_path here, or later libraries in the loop
            # would be rewritten with an already-substituted prefix
            new_lib = lib.replace(old_path, new_path)
            cmd = '%s -change "%s" "%s" "%s"' % (INT_CMD, lib, new_lib,
                                                 object_file)
            shell.call(cmd, fail=True)
def change_id(self, object_file, id=None):
    id = id or object_file.replace(self.lib_prefix, self.new_lib_prefix)
    filename = os.path.basename(object_file)
    if not (filename.endswith('so') or filename.endswith('dylib')):
        return
    cmd = '%s -id %s %s' % (INT_CMD, id, object_file)
    shell.call(cmd, fail=False)
def change_libs_path(self, object_file):
    for lib in self.list_shared_libraries(object_file):
        if self.lib_prefix in lib:
            new_lib = lib.replace(self.lib_prefix, self.new_lib_prefix)
            cmd = '%s -change %s %s %s' % (INT_CMD, lib, new_lib, object_file)
            shell.call(cmd)
def _install_perl_deps(self):
    # Install cpan-minus, a zero-conf CPAN wrapper
    cpanm_installer = tempfile.NamedTemporaryFile()
    shell.download(self.CPANM_URL, cpanm_installer.name, overwrite=True)
    shell.call("chmod +x %s" % cpanm_installer.name)
    # Install XML::Parser, required for intltool
    shell.call("sudo %s XML::Parser" % cpanm_installer.name)
    cpanm_installer.close()
def start(self):
    # libvpx needs this change: if the Command Line Tools are installed
    # before Xcode, xcode-select points at the Command Line Tools instead of
    # Xcode itself, and the Command Line Tools don't ship the iOS libraries
    # needed for the build.
    if self.config.distro_version in [DistroVersion.OS_X_HIGH_SIERRA]:
        shell.call('sudo xcode-select -switch /Applications/Xcode.app')
    # FIXME: enable it when buildbots are properly configured
    return
def setUp(self):
    self.config = DummyConfig()
    self.store = create_store(self.config)
    self.tmp = tempfile.mkdtemp()
    self.package = self.store.get_package('gstreamer-runtime')
    self.packages_path = os.path.join(self.tmp, 'test.pkg')
    os.mkdir(self.packages_path)
    shell.call('touch file1 file2 file3', self.packages_path)
def revert_all(repo):
    '''
    Reverts all changes in a repository

    @param repo: the path to the repository
    @type repo: str
    '''
    shell.call('svn revert -R .', repo)
def start(self):
    # FIXME: enable it when buildbots are properly configured
    return
    tar = self.GCC_TAR[self.config.distro_version]
    url = os.path.join(self.GCC_BASE_URL, tar)
    pkg = os.path.join(self.config.local_sources, tar)
    shell.download(url, pkg, check_cert=False)
    shell.call('sudo installer -pkg %s -target /' % pkg)
def _create_dmg(self):
    dmg_file = os.path.join(self.output_dir, '%s-%s-%s.dmg' % (
        self.package.app_name, self.package.version, self.config.target_arch))
    # Create Disk Image
    cmd = 'hdiutil create %s -volname %s -ov -srcfolder %s' % \
        (dmg_file, self.package.app_name, self.tmp)
    shell.call(cmd)
    return dmg_file
def configure(self):
    shell.call(self.configure_tpl % {'config-sh': self.config_sh,
                                     'prefix': to_unixpath(self.config.prefix),
                                     'libdir': to_unixpath(self.config.libdir),
                                     'host': self.config.host,
                                     'target': self.config.target,
                                     'build': self.config.build,
                                     'options': self.configure_options},
               self.make_dir)
def compile(self, objects, msi_file, output_dir, merge_module=False):
    self.options['objects'] = ' '.join(objects)
    self.options['msi'] = msi_file
    if merge_module:
        self.options['ext'] = 'msm'
    else:
        self.options['ext'] = 'msi'
    shell.call(self.cmd % self.options, output_dir)
    return os.path.join(output_dir, '%(msi)s.%(ext)s' % self.options)
def _create_packages_dmg(self):
    paths = self.packages_paths[PackageType.RUNTIME].values()
    dmg_file = os.path.join(self.output_dir,
                            self._package_name("-packages.dmg"))
    # Create Disk Image
    cmd = "hdiutil create %s -ov" % dmg_file
    for p in paths:
        cmd += " -srcfolder %s" % p
    shell.call(cmd)
def init(git_dir):
    '''
    Initialize a git repository with 'git init'

    @param git_dir: path of the git repository
    @type git_dir: str
    '''
    shell.call('mkdir -p %s' % git_dir)
    shell.call('%s init' % GIT, git_dir)
def start(self):
    for c in self.checks:
        c()

    if self.config.distro_packages_install:
        packages = self.packages
        if self.config.distro_version in self.distro_packages:
            packages += self.distro_packages[self.config.distro_version]
        # Install the combined list, including the distro-specific packages
        shell.call(self.tool % ' '.join(packages))
def _create_packages_dmg(self):
    paths = list(self.packages_paths[PackageType.RUNTIME].values())
    dmg_file = os.path.join(self.output_dir,
                            self._package_name('-packages.dmg'))
    # Create Disk Image
    cmd = 'hdiutil create %s -ov' % dmg_file
    for p in paths:
        cmd += ' -srcfolder %s' % p
    shell.call(cmd)
def checkout(url, dest):
    '''
    Checkout a url to a given destination

    @param url: url to checkout
    @type url: str
    @param dest: path where to do the checkout
    @type dest: str
    '''
    shell.call('svn co --non-interactive --trust-server-cert %s %s' %
               (url, dest), env=CLEAN_ENV)
def update(repo, revision='HEAD'):
    '''
    Update a repository to a given revision

    @param repo: repository path
    @type repo: str
    @param revision: the revision to checkout
    @type revision: str
    '''
    shell.call('svn up --non-interactive --trust-server-cert -r %s' % revision,
               repo, env=CLEAN_ENV)
def checkout(url, dest):
    '''
    Checkout a url to a given destination

    @param url: url to checkout
    @type url: str
    @param dest: path where to do the checkout
    @type dest: str
    '''
    shell.call('svn co %s %s' % (url, dest))
def checkout_file(url, out_path):
    '''
    Checkout a single file to out_path

    @param url: file URL
    @type url: str
    @param out_path: output path
    @type out_path: str
    '''
    shell.call('svn export --force %s %s' % (url, out_path))
def update(repo, revision='HEAD'):
    '''
    Update a repository to a given revision

    @param repo: repository path
    @type repo: str
    @param revision: the revision to checkout
    @type revision: str
    '''
    shell.call('svn up -r %s' % revision, repo)
def check(self):
    if self.make_check:
        shell.call(self.make_check, self.build_dir, logfile=self.logfile,
                   env=self.env)
def clean(self):
    self.maybe_add_system_libs(step='clean')
    shell.call(self.make_clean, self.make_dir, logfile=self.logfile,
               env=self.env)
def install_mingwget_deps(self):
    for dep in MINGWGET_DEPS:
        shell.call('mingw-get install %s' % dep)
def extract_engine(self, bundle, output_dir, engine='engine.exe'):
    self.options['bundle'] = bundle
    self.options['engine'] = engine
    shell.call(self.cmd_extract % self.options, output_dir)
    return os.path.join(output_dir, '%(engine)s' % self.options)
def check(self):
    if self.make_check:
        shell.call(self.make_check, self.build_dir)
def compile(self):
    shell.call(self.make, self.make_dir)
def _add_applications_link(self):
    # Create link to /Applications
    applications_link = os.path.join(self.approot, 'Applications')
    shell.call('ln -s /Applications %s' % applications_link)
def check(self):
    shell.call(self.make_check, self.meson_dir, logfile=self.logfile,
               env=self.env)
def configure(self):
    # Only use --disable-maintainer mode for real autotools based projects
    if os.path.exists(os.path.join(self.config_src_dir, 'configure.in')) or \
            os.path.exists(os.path.join(self.config_src_dir, 'configure.ac')):
        self.configure_tpl += " --disable-maintainer-mode "
        self.configure_tpl += " --disable-silent-rules "

    if self.config.variants.gi and not self.disable_introspection:
        self.configure_tpl += " --enable-introspection "
    else:
        self.configure_tpl += " --disable-introspection "

    if self.autoreconf:
        shell.call(self.autoreconf_sh, self.config_src_dir)

    files = shell.check_call('find %s -type f -name config.guess' %
                             self.config_src_dir).split('\n')
    files.remove('')
    for f in files:
        o = os.path.join(self.config._relative_path('data'), 'autotools',
                         'config.guess')
        m.action("copying %s to %s" % (o, f))
        shutil.copy(o, f)

    files = shell.check_call('find %s -type f -name config.sub' %
                             self.config_src_dir).split('\n')
    files.remove('')
    for f in files:
        o = os.path.join(self.config._relative_path('data'), 'autotools',
                         'config.sub')
        m.action("copying %s to %s" % (o, f))
        shutil.copy(o, f)

    if self.config.platform == Platform.WINDOWS and \
            self.supports_cache_variables:
        # On Windows, environment variables are uppercase, but we still
        # need to pass things like am_cv_python_platform in lowercase for
        # configure and autogen.sh
        for k, v in os.environ.iteritems():
            if k[2:6] == '_cv_':
                self.configure_tpl += ' %s="%s"' % (k, v)

    if self.add_host_build_target:
        if self.config.host is not None:
            self.configure_tpl += ' --host=%(host)s'
        if self.config.build is not None:
            self.configure_tpl += ' --build=%(build)s'
        if self.config.target is not None:
            self.configure_tpl += ' --target=%(target)s'

    use_configure_cache = self.config.use_configure_cache
    if self.use_system_libs and self.config.allow_system_libs:
        use_configure_cache = False

    if self.new_env or self.append_env:
        use_configure_cache = False

    if use_configure_cache and self.can_use_configure_cache:
        cache = os.path.join(self.config.sources, '.configure.cache')
        self.configure_tpl += ' --cache-file=%s' % cache

    MakefilesBase.configure(self)
def install(self):
    shell.call(self.make_install, self.meson_dir, logfile=self.logfile,
               env=self.env)
def compile(self):
    shell.call(self.make, self.meson_dir, logfile=self.logfile, env=self.env)
def _create_dmg(self, dmg_file, pkg_dirs):
    cmd = 'hdiutil create %s -ov' % dmg_file
    for pkg_dir in pkg_dirs:
        cmd += ' -srcfolder %s' % pkg_dir
    shell.call(cmd)
def start(self):
    packages = self.packages
    if self.config.distro_version in self.distro_packages:
        packages += self.distro_packages[self.config.distro_version]
    # Install the combined list, including the distro-specific packages
    shell.call(self.tool % ' '.join(packages))
def clean(self):
    shell.call(self.make_clean, self.make_dir)
def install(self):
    shell.call(self.make_install, self.make_dir)
def _create_framework_library(self, libraries):
    libraries = ' '.join(
        ['-Wl,-reexport_library %s' % x for x in libraries])
    shell.call('clang -dynamiclib -o %s -arch %s -install_name %s %s' %
               (self.libname, self.arch, self.install_name, libraries),
               env=self.env)
def attach_engine(self, bundle, output_dir, engine):
    self.options['bundle'] = bundle
    self.options['engine'] = engine
    shell.call(self.cmd_attach % self.options, output_dir)
    return bundle
def _create_framework_library(self, libraries):
    tmpdir = tempfile.mkdtemp()

    libname = os.path.basename(self.libname)  # just to make sure
    if self.arch == Architecture.UNIVERSAL:
        archs = self.universal_archs
    else:
        archs = [self.arch]
    archs = [a if a != Architecture.X86 else 'i386' for a in archs]

    for thin_arch in archs:
        object_files_md5 = []
        shell.call('mkdir -p %s' % thin_arch, tmpdir, env=self.env)
        tmpdir_thinarch = os.path.join(tmpdir, thin_arch)

        for lib in libraries:
            libprefix = os.path.split(lib)[-1].replace('.', '_')

            if len(archs) > 1:
                # should be a fat file, split only to the arch we want
                libprefix += '_%s_' % thin_arch
                lib_tmpdir = self._split_static_lib(lib, thin_arch)
            else:
                lib_tmpdir = self._split_static_lib(lib)

            if lib_tmpdir is None:
                # arch is not supported in the static lib, skip it
                continue

            obj_files = shell.ls_files(['*.o'], lib_tmpdir)
            target_objs = []
            for obj_f in obj_files:
                obj_path = os.path.join(lib_tmpdir, obj_f)
                md5 = shell.check_call('md5 -q %s' % obj_path,
                                       env=self.env).split('\n')[0]
                md5 = '%s-%s' % (md5, os.path.getsize(obj_path))
                if md5 not in object_files_md5:
                    target_name = '%s-%s' % (libprefix, obj_f)
                    try:
                        # Hard link source file to the target name
                        os.link(obj_path, tmpdir_thinarch + '/' + target_name)
                    except:
                        # Fall back to cp if hard link doesn't work
                        # for any reason
                        shell.call('cp %s %s' % (obj_path, target_name),
                                   tmpdir_thinarch, env=self.env)

                    # If we have a duplicate object, commit any collected ones
                    if target_name in target_objs:
                        m.warning("Committing %d objects due to dup %s" %
                                  (len(target_objs), target_name))
                        shell.call('ar -cqS %s %s' %
                                   (libname, " ".join(target_objs)),
                                   tmpdir_thinarch, env=self.env)
                        target_objs = []

                    target_objs.append(target_name)
                    object_files_md5.append(md5)

            # Put all the collected target_objs in the archive. cmdline limit
            # is 262k args on OSX.
            if len(target_objs):
                shell.call('ar -cqS %s %s' % (libname, " ".join(target_objs)),
                           tmpdir_thinarch, env=self.env)
            shutil.rmtree(lib_tmpdir)
        shell.call('ar -s %s' % (libname), tmpdir_thinarch, env=self.env)

    files = [os.path.join(tmpdir, arch, libname) for arch in archs]
    self._check_duplicated_symbols(files, tmpdir)
    if len(archs) > 1:
        # merge the final libs into a fat file again
        shell.call('lipo %s -create -output %s' %
                   (' '.join(files), self.install_name), tmpdir, env=self.env)
    else:
        shell.call('cp %s %s' % (os.path.join(tmpdir, self.arch, libname),
                                 self.install_name), tmpdir, env=self.env)
    shutil.rmtree(tmpdir)
def compile(self, source, output_dir):
    self.options['source'] = source
    shell.call(self.cmd % self.options, output_dir)
    return os.path.join(output_dir, source, '.msm')
def _clean_tmps(self):
    shell.call("rm -rf %s" % self.tmp_install_dir)
def submodules_update(git_dir, src_dir=None, fail=True):
    '''
    Update submodules from a local directory

    @param git_dir: path of the git repository
    @type git_dir: str
    @param src_dir: path or base URI of the source directory
    @type src_dir: str
    @param fail: raise an error if the command failed
    @type fail: bool
    '''
    if src_dir:
        config = shell.check_call('%s config --file=.gitmodules --list' % GIT,
                                  git_dir)
        config_array = [s.split('=', 1) for s in config.split('\n')]
        for c in config_array:
            if c[0].startswith('submodule.') and c[0].endswith('.path'):
                submodule = c[0][len('submodule.'):-len('.path')]
                shell.call("%s config --file=.gitmodules submodule.%s.url %s" %
                           (GIT, submodule, os.path.join(src_dir, c[1])),
                           git_dir)
    shell.call("%s submodule init" % GIT, git_dir)
    shell.call("%s submodule sync" % GIT, git_dir)
    shell.call("%s submodule update" % GIT, git_dir, fail=fail)
    if src_dir:
        for c in config_array:
            if c[0].startswith('submodule.') and c[0].endswith('.url'):
                shell.call("%s config --file=.gitmodules %s %s" %
                           (GIT, c[0], c[1]), git_dir)
        shell.call("%s submodule sync" % GIT, git_dir)
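# A minimal usage sketch, not part of the original source: point a checkout's
# submodules at a local mirror, update them, then let the function restore the
# original .gitmodules URLs. Both paths are hypothetical examples.
def _example_submodules_update():
    submodules_update('/tmp/gst-plugins-bad-checkout',
                      src_dir='/var/cache/cerbero/sources/gst-plugins-bad',
                      fail=False)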
def create_bundle(self, target_dir=None):
    '''
    Creates the bundle structure

    Commands -> Versions/Current/Commands
    Headers -> Versions/Current/Headers
    Libraries -> Versions/Current/Libraries
    Home -> Versions/Current
    Resources -> Versions/Current/Resources
    Versions/Current -> Versions/$VERSION/$ARCH
    Framework -> Versions/Current/Framework
    '''
    if target_dir:
        tmp = target_dir
    else:
        tmp = tempfile.mkdtemp()

    #if self.config.target_arch == Architecture.UNIVERSAL:
    #    arch_dir = ''
    #else:
    #    arch_dir = self.config.target_arch

    vdir = os.path.join('Versions', self.package.sdk_version)  #, arch_dir)
    rdir = '%s/Resources/' % vdir
    shell.call('mkdir -p %s' % rdir, tmp)

    links = {'Versions/Current': '%s' % self.package.sdk_version,
             'Resources': 'Versions/Current/Resources',
             'Commands': 'Versions/Current/Commands',
             'Headers': 'Versions/Current/Headers',
             'Libraries': 'Versions/Current/Libraries'}
    inner_links = {'Commands': 'bin', 'Libraries': 'lib'}

    # Create the frameworks Info.plist file
    framework_plist = FrameworkPlist(
        self.package.name, self.package.org, self.package.version,
        self.package.shortdesc, self.package.config.min_osx_sdk_version)
    framework_plist.save(os.path.join(tmp, rdir, 'Info.plist'))

    # Add a link from Framework to Versions/Current/Framework
    if self.package.osx_framework_library is not None:
        name, link = self.package.osx_framework_library
        # Framework -> Versions/Current/Framework
        links[name] = 'Versions/Current/%s' % name

    # Create all links
    for dest, src in links.iteritems():
        shell.call('ln -s %s %s' % (src, dest), tmp)
    inner_tmp = os.path.join(tmp, vdir)
    for dest, src in inner_links.iteritems():
        shell.call('ln -s %s %s' % (src, dest), inner_tmp)

    # Copy the framework library to Versions/$VERSION/$ARCH/Framework
    if self.package.osx_framework_library is not None \
            and os.path.exists(os.path.join(self.config.prefix, link)):
        shell.call('mkdir -p %s' % vdir, tmp)
        shutil.copy(os.path.join(self.config.prefix, link),
                    os.path.join(tmp, vdir, name))
    return tmp
def check(self):
    self.maybe_add_system_libs(step='check')
    shell.call(self.make_check, self.meson_dir, logfile=self.logfile,
               env=self.env)
def _split_static_lib(self, lib, thin_arch=None):
    '''Splits the static lib @lib into its object files

       Splits the static lib @lib into its object files and returns
       a new temporary directory where the .o files should be found.

       If @thin_arch was provided, it considers @lib to be a fat binary
       and takes its thin version for the @thin_arch specified before
       retrieving the object files.
    '''
    lib_tmpdir = tempfile.mkdtemp()
    shutil.copy(lib, lib_tmpdir)
    tmplib = os.path.join(lib_tmpdir, os.path.basename(lib))

    if thin_arch:  # should be a fat file, split only to the arch we want
        newname = '%s_%s' % (thin_arch, os.path.basename(lib))
        cmd = ['lipo', tmplib, '-thin', thin_arch, '-output', newname]
        try:
            subprocess.check_output(cmd, cwd=lib_tmpdir,
                                    stderr=subprocess.STDOUT,
                                    universal_newlines=True,
                                    env=self.env)
        except subprocess.CalledProcessError as e:
            if 'does not contain the specified architecture' in e.output:
                return None
            raise
        tmplib = os.path.join(lib_tmpdir, newname)

    shell.call('ar -x %s' % tmplib, lib_tmpdir, env=self.env)

    # object files with the same name in an archive are overwritten
    # when they are extracted. osx's ar does not support the N count
    # modifier so after extracting all the files we remove them from
    # the archive to extract those with duplicated names.
    # eg:
    # ar t libavcodec.a -> mlpdsp.o mlpdsp.o (2 objects with the same name)
    # ar d libavcodec.a mlpdsp.o (we remove the first one)
    # ar t libavcodec.a -> mlpdsp.o (we only have the second one now)
    files = shell.check_call('ar -t %s' % tmplib, lib_tmpdir,
                             env=self.env).split('\n')
    # FIXME: We should use collections.Counter but it's only available in
    # python 2.7+
    dups = defaultdict(int)
    for f in files:
        dups[f] += 1
    for f in dups:
        if dups[f] <= 1:
            continue
        for x in range(dups[f]):
            path = os.path.join(lib_tmpdir, f)
            new_path = os.path.join(lib_tmpdir, 'dup%d_' % x + f)
            # The duplicated overwrote the first one, so extract it again
            shell.call('ar -x %s %s' % (tmplib, f), lib_tmpdir, env=self.env)
            shutil.move(path, new_path)
            shell.call('ar -d %s %s' % (tmplib, f), lib_tmpdir, env=self.env)
    return lib_tmpdir
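# A minimal usage sketch, not part of the original source: split a fat static
# library to a single architecture before collecting its object files, as done
# by _create_framework_library above. `fwlib` stands for an object of the
# class these methods belong to, and the library path and arch are
# hypothetical examples.
def _example_split_static_lib(fwlib):
    objs_dir = fwlib._split_static_lib('/path/to/libavcodec.a', 'arm64')
    if objs_dir is None:
        # the requested arch is not present in the fat library
        return []
    return shell.ls_files(['*.o'], objs_dir)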
def install(self):
    self.maybe_add_system_libs(step='install')
    shell.call(self.make_install, self.meson_dir, logfile=self.logfile,
               env=self.env)
def check(self):
    shell.call(self.make_check, self.meson_dir)
def rmtree(path, onerror=None):
    call('rm -rf %s' % path, fail=False)
def compile(self):
    if self.using_msvc():
        self.unset_toolchain_env()
    shell.call(self.make, self.make_dir)
def clean(self):
    shell.call(self.make_clean, self.meson_dir, logfile=self.logfile,
               env=self.env)
def local_checkout(git_dir, local_git_dir, commit):
    '''
    Clone a repository for a given commit in a different location

    @param git_dir: destination path of the git repository
    @type git_dir: str
    @param local_git_dir: path of the source git repository
    @type local_git_dir: str
    @param commit: the commit to checkout
    @type commit: str
    '''
    # reset to a commit in case it's the first checkout and the master branch
    # is missing
    shell.call('%s reset --hard %s' % (GIT, commit), local_git_dir)
    shell.call('%s branch build' % GIT, local_git_dir, fail=False)
    shell.call('%s checkout build' % GIT, local_git_dir)
    shell.call('%s reset --hard %s' % (GIT, commit), local_git_dir)
    shell.call('%s clone %s -b build .' % (GIT, local_git_dir), git_dir)
    shell.call('%s submodule update --init --recursive' % (GIT), git_dir)