def _write_tar(self, filename, package_prefix, files):
    """Create a compressed tar archive at ``filename``.

    Paths are archived relative to ``self.prefix``.  When
    ``package_prefix`` is set, file entries (but not symbolic/hard
    links) are prefixed with it inside the archive.  Compression is
    selected by ``self.compress`` ('bz2' or 'xz'); any other value
    raises AssertionError.

    On a tar failure the partial output is renamed to
    ``filename + '.partial'`` and the FatalError is re-raised.
    """
    tar_cmd = ['tar', '-C', self.prefix, '-cf', filename]
    # ensure we provide a unique list of files to tar to avoid
    # it creating hard links/copies
    files = sorted(set(files))
    if package_prefix:
        # Only transform the files (and not symbolic/hard links)
        tar_cmd += [
            '--transform', 'flags=r;s|^|{}/|'.format(package_prefix)
        ]
    if self.compress == 'bz2':
        # Use lbzip2 when available for parallel compression
        if shutil.which('lbzip2'):
            tar_cmd += ['--use-compress-program=lbzip2']
        else:
            tar_cmd += ['--bzip2']
    elif self.compress == 'xz':
        tar_cmd += ['--use-compress-program=xz --threads=0']
    else:
        raise AssertionError("Unknown tar compression: {}".format(
            self.compress))
    try:
        shell.new_call(tar_cmd + files)
    except FatalError:
        # tar may fail before creating any output file; only rename when
        # it exists so a FileNotFoundError from os.replace() does not
        # mask the original error.
        if os.path.exists(filename):
            os.replace(filename, filename + '.partial')
        raise
def build(self, output_dir, tarname, tmpdir, packagedir, srcdir):
    """Run rpmbuild for the current target and collect the results.

    Builds the spec file with the buildroot rooted in ``tmpdir`` and
    moves every generated RPM from ``packagedir`` into ``output_dir``,
    replacing any file already there.  Returns the destination paths.
    """
    arch_targets = {
        Architecture.X86: 'i686-redhat-linux',
        Architecture.X86_64: 'x86_64-redhat-linux',
    }
    target = arch_targets.get(self.config.target_arch)
    if target is None:
        raise FatalError(_('Architecture %s not supported') % \
            self.config.target_arch)

    extra_options = '--nodebuginfo' if self._rpmbuild_support_nodebuginfo() else ''
    shell.new_call(
        'rpmbuild -bb %s --buildroot %s/buildroot --target %s %s' %
        (extra_options, tmpdir, target, self.spec_path))

    paths = []
    for subdir in os.listdir(packagedir):
        for pkg_file in os.listdir(os.path.join(packagedir, subdir)):
            dest = os.path.join(output_dir, pkg_file)
            # Replace a stale package from a previous build.
            if os.path.exists(dest):
                os.remove(dest)
            paths.append(dest)
            shutil.move(os.path.join(packagedir, subdir, pkg_file),
                        output_dir)
    return paths
async def create_universal_file(self, output, inputlist, dirs):
    """Merge per-architecture binaries from ``inputlist`` into one fat
    Mach-O file at ``output`` using lipo.

    Each input is first copied to a temporary file and relocated so it
    carries the prefix of the merged output before the merge happens.
    """
    temp_copies = []
    for path in inputlist:
        # Keep the filename in the suffix to preserve the file extension.
        copy = tempfile.NamedTemporaryFile(suffix=os.path.basename(path))
        temp_copies.append(copy)
        shutil.copy(path, copy.name)
        old_prefix = [d for d in dirs if d in path][0]
        relocator = OSXRelocator(self.output_root, old_prefix, False,
                                 logfile=self.logfile)
        # The temporary file has a mangled name, so force the library id
        # to the real one instead of deriving it from the filename.
        relocator.relocate_file(copy.name)
        relocator.change_id(
            copy.name, id=path.replace(old_prefix, self.output_root))

    lipo_cmd = [self.LIPO_CMD, '-create']
    lipo_cmd += [c.name for c in temp_copies]
    lipo_cmd += ['-output', output]
    shell.new_call(lipo_cmd)

    for copy in temp_copies:
        copy.close()
def change_id(self, object_file, id=None):
    """Set the install id of ``object_file`` with install_name_tool.

    Defaults to the @rpath-relative form of the file's own path.
    Non Mach-O files are skipped.
    """
    id = id or object_file.replace(self.lib_prefix, '@rpath')
    if not self._is_mach_o_file(os.path.basename(object_file)):
        return
    shell.new_call([INT_CMD, '-id', id, object_file], fail=False,
                   logfile=self.logfile)
def gen_dep(self, config, args, deps, sha):
    """Generate the build-deps tarball and update the deps log.

    Removes any stale outputs, archives the built dependencies with
    xz-compressed tar, prepends an entry (commit, checksum, url) to
    ``deps``, truncates the log to ``self.log_size`` entries and writes
    it as JSON.  On failure any partial outputs are removed and the
    FatalError is re-raised.
    """
    deps_filename = self.get_deps_filename(config)
    if os.path.exists(deps_filename):
        os.remove(deps_filename)
    log_filename = self.get_log_filename(config)
    if os.path.exists(log_filename):
        os.remove(log_filename)

    # Workaround special mangling for windows hidden in the config
    arch = os.path.basename(config.sources)
    try:
        shell.new_call([
            'tar', '-C', config.home_dir,
            '--use-compress-program=xz --threads=0', '--exclude=var/tmp',
            '-cf', deps_filename, 'build-tools',
            config.build_tools_cache,
            os.path.join('dist', arch), config.cache_file
        ])
        url = self.make_url(config, args,
                            '%s-%s' % (sha, self.deps_filename))
        deps.insert(
            0, {
                'commit': sha,
                'checksum': self.checksum(deps_filename),
                'url': url
            })
        deps = deps[0:self.log_size]
        with open(log_filename, 'w') as outfile:
            json.dump(deps, outfile, indent=1)
    except FatalError:
        # The failure may happen before either file was (re)created; an
        # unconditional os.remove() would raise FileNotFoundError and
        # mask the original error.
        if os.path.exists(deps_filename):
            os.remove(deps_filename)
        if os.path.exists(log_filename):
            os.remove(log_filename)
        raise
    m.message('build-dep cache generated as {}'.format(deps_filename))
def create(self, libname, dllpath, platform, target_arch, outputdir):
    """Generate a Visual Studio import library (.lib) for a DLL.

    Creates a .def file from the DLL exports and builds the import
    library with Microsoft's LIB.exe when available, falling back to
    dlltool otherwise.  Returns the path of the generated .lib file.
    """
    # foo.lib must not start with 'lib'
    stem = libname[3:] if libname.startswith('lib') else libname
    self.filename = stem + '.lib'

    bindir, dllname = os.path.split(dllpath)
    # Create the .def file
    defname = self.gendef(dllpath, outputdir, libname)

    # Create the import library
    lib_path, paths = self._get_lib_exe_path(target_arch, platform)
    # Prefer LIB.exe over dlltool:
    # http://sourceware.org/bugzilla/show_bug.cgi?id=12633
    if lib_path is None:
        m.warning("Using dlltool instead of lib.exe! Resulting .lib files"
                  " will have problems with Visual Studio, see "
                  " http://sourceware.org/bugzilla/show_bug.cgi?id=12633")
        self.dlltool(defname, dllname, outputdir)
    else:
        arch = 'x86' if target_arch == Architecture.X86 else 'x64'
        env = self.config.env.copy()
        env['PATH'] = paths + ';' + env['PATH']
        shell.new_call(
            [lib_path, '/DEF:' + defname, '/OUT:' + self.filename,
             '/MACHINE:' + arch],
            outputdir, logfile=self.logfile, env=env)
    return os.path.join(outputdir, self.filename)
def check(self):
    """Run the recipe's check step, if one is configured."""
    if not self.make_check:
        return
    self.maybe_add_system_libs(step='check')
    shell.new_call(self.make_check, self.build_dir,
                   logfile=self.logfile, env=self.env)
def change_id(self, object_file, id=None):
    """Set the dynamic library id of ``object_file``.

    Defaults to the @rpath-relative form of the file's own path.
    Files whose name does not end in 'so' or 'dylib' are skipped.
    """
    id = id or object_file.replace(self.lib_prefix, '@rpath')
    if not os.path.basename(object_file).endswith(('so', 'dylib')):
        return
    shell.new_call([INT_CMD, '-id', id, object_file], fail=False,
                   logfile=self.logfile)
def dlltool(self, defname, dllname, outputdir):
    """Create the import library from a .def file using dlltool."""
    cmd = list(self.dlltool_bin)
    cmd += ['-d', defname, '-l', self.filename, '-D', dllname]
    shell.new_call(cmd, outputdir, logfile=self.logfile,
                   env=self.config.env)
def _install_perl_deps(self):
    """Install the Perl modules needed by the build using cpanm."""
    cpanm_installer = os.path.join(self.config.local_sources, 'cpanm')
    shell.new_call(['chmod', '+x', cpanm_installer])
    # Install XML::Parser, required for intltool
    cmd = ['sudo', cpanm_installer, 'XML::Parser']
    message = ("Installing XML::Parser, may require a password for "
               "running \'" + " ".join(cmd) + "\'")
    m.message(message)
    shell.new_call(cmd)
def build(self, output_dir, tarname, tmpdir, packagedir, srcdir):
    """Build the Debian package(s) with dpkg-buildpackage.

    Extracts the files tarball into ``tmpdir``, collects the shlibs
    files generated by our dependencies into debian/shlibs.local so
    dpkg-shlibdeps can resolve them without Build-Depends, runs
    dpkg-buildpackage, publishes this package's generated shlibs file
    for dependent packages, and moves the resulting .deb files to
    ``output_dir``.  Returns the list of moved package paths.
    """
    if tarname:
        # close the tarball deterministically even if extraction fails
        with tarfile.open(tarname, 'r:bz2') as tar:
            tar.extractall(tmpdir)
    if not isinstance(self.package, MetaPackage):
        # for each dependency, copy the generated shlibs to this
        # package debian/shlibs.local, so that dpkg-shlibdeps knows where
        # our dependencies are without using Build-Depends:
        package_deps = self.store.get_package_deps(self.package.name,
                                                   recursive=True)
        if package_deps:
            shlibs_local_path = os.path.join(packagedir, 'shlibs.local')
            # use context managers so file descriptors are not leaked
            with open(shlibs_local_path, 'w') as f:
                for p in package_deps:
                    package_shlibs_path = os.path.join(
                        tmpdir,
                        self.package_prefix + p.name + '-shlibs')
                    m.action(_('Copying generated shlibs file %s for ' \
                               'dependency %s to %s') %
                             (package_shlibs_path, p.name,
                              shlibs_local_path))
                    if os.path.exists(package_shlibs_path):
                        with open(package_shlibs_path, 'r') as dep_f:
                            shutil.copyfileobj(dep_f, f)
    shell.new_call(
        ['dpkg-buildpackage', '-rfakeroot', '-us', '-uc', '-D', '-b'],
        srcdir)
    # we may only have a generated shlibs file if at least we have
    # runtime files
    if tarname:
        # copy generated shlibs to tmpdir/$package-shlibs to be used by
        # dependent packages
        shlibs_path = os.path.join(
            packagedir, self.package_prefix + self.package.name,
            'DEBIAN', 'shlibs')
        out_shlibs_path = os.path.join(
            tmpdir, self.package_prefix + self.package.name + '-shlibs')
        m.action(
            _('Copying generated shlibs file %s to %s') %
            (shlibs_path, out_shlibs_path))
        if os.path.exists(shlibs_path):
            shutil.copy(shlibs_path, out_shlibs_path)
    # copy the newly created package, which should be in tmpdir
    # to the output dir
    paths = []
    for f in os.listdir(tmpdir):
        if fnmatch(f, '*.deb'):
            out_path = os.path.join(output_dir, f)
            if os.path.exists(out_path):
                os.remove(out_path)
            paths.append(out_path)
            shutil.move(os.path.join(tmpdir, f), output_dir)
    return paths
def checkout_file(url, out_path):
    '''
    Checkout a single file to out_path

    @param url: file URL
    @type url: str
    @param out_path: output path
    @type out_path: str
    '''
    # --force overwrites out_path if it already exists
    shell.new_call(['svn', 'export', '--force', url, out_path])
def init(git_dir, logfile=None):
    '''
    Initialize a git repository with 'git init'

    @param git_dir: path of the git repository
    @type git_dir: str
    '''
    # 'git init' will not create intermediate directories for us
    os.makedirs(git_dir, exist_ok=True)
    shell.new_call([GIT, 'init'], git_dir,
                   logfile=logfile)
    # make sure user.name/user.email are configured so commits work
    ensure_user_is_set(git_dir, logfile=logfile)
def _create_framework_library(self, libraries):
    """Link the framework dylib, re-exporting every library given."""
    cmdline = ['clang', '-dynamiclib', '-o', self.libname,
               '-arch', self.arch]
    if self.target == Distro.OS_X:
        cmdline.append('-mmacosx-version-min=%s' % self.min_version)
    cmdline.extend(['-install_name', self.install_name])
    for lib in libraries:
        cmdline.extend(['-Wl,-reexport_library', lib])
    shell.new_call(cmdline, env=self.env)
def apply_patch(patch, git_dir, logfile=None):
    '''
    Applies a commit patch using 'git am' on a directory

    @param git_dir: path of the git repository
    @type git_dir: str
    @param patch: path of the patch file
    @type patch: str
    '''
    shell.new_call([GIT, 'am', '--ignore-whitespace', patch], git_dir, logfile=logfile)
def init_directory(git_dir, logfile=None):
    '''
    Initialize a git repository with the contents of a directory

    @param git_dir: path of the git repository
    @type git_dir: str
    '''
    init(git_dir, logfile=logfile)
    # stage everything (including ignored files) and make the first commit
    for git_args in (['add', '--force', '-A', '.'],
                     ['commit', '-m', 'Initial commit']):
        shell.new_call([GIT] + git_args, git_dir, logfile=logfile)
def _write_tar(self, filename, package_prefix, files):
    """Create a bzip2-compressed tar archive at ``filename``.

    Paths are archived relative to ``self.prefix``.  ``package_prefix``
    is accepted for interface compatibility but not used by this
    variant.  On a tar failure the partial output is renamed to
    ``filename + '.partial'`` and the FatalError is re-raised.
    """
    tar_cmd = ['tar', '-C', self.prefix, '-cf', filename]
    # Use lbzip2 when available for parallel compression
    if shutil.which('lbzip2'):
        tar_cmd += ['--use-compress-program=lbzip2']
    else:
        tar_cmd += ['--bzip2']
    try:
        shell.new_call(tar_cmd + files)
    except FatalError:
        # tar may fail before creating any output file; only rename when
        # it exists so a FileNotFoundError from os.replace() does not
        # mask the original error.
        if os.path.exists(filename):
            os.replace(filename, filename + '.partial')
        raise
def _create_dmg(self):
    """Pack ``self.approot`` into a disk image and return its path."""
    dmg_name = '%s-%s-%s.dmg' % (self.package.app_name,
                                 self.package.version,
                                 self.config.target_arch)
    dmg_file = os.path.join(self.output_dir, dmg_name)
    # Create Disk Image
    shell.new_call(['hdiutil', 'create', dmg_file,
                    '-volname', self.package.app_name,
                    '-ov', '-srcfolder', self.approot])
    return dmg_file
def compile(self, objects, msi_file, output_dir, merge_module=False):
    """Link WiX objects into an installer (.msi) or merge module (.msm).

    Returns the path of the generated file inside ``output_dir``.
    """
    self.options['objects'] = ' '.join(objects)
    self.options['msi'] = msi_file
    self.options['ext'] = 'msm' if merge_module else 'msi'
    shell.new_call(self.cmd % self.options, output_dir)
    msi_file_path = os.path.join(output_dir,
                                 '%(msi)s.%(ext)s' % self.options)
    # when built through wine, fix up the permissions of the output file
    if self.options['wine'] == 'wine':
        shell.new_call(['chmod', '0755', msi_file_path])
    return msi_file_path
def code_sign(self):
    ''' Codesign OSX build-tools binaries '''
    prefix = self.config.prefix
    # unique set of real paths for everything under bin/
    binaries = {
        os.path.realpath(os.path.join(prefix, f))
        for f in self.files_list()
        if f.split('/')[0] == 'bin'
    }
    for binary in binaries:
        shell.new_call(['codesign', '-f', '-s', '-', binary],
                       logfile=self.logfile, env=self.env)
def init_directory(git_dir, logfile=None):
    '''
    Initialize a git repository with the contents of a directory

    @param git_dir: path of the git repository
    @type git_dir: str
    '''
    init(git_dir, logfile=logfile)
    try:
        shell.new_call([GIT, 'add', '--force', '-A', '.'], git_dir,
                       logfile=logfile)
        # best-effort: the commit may legitimately fail, e.g. when
        # there is nothing to commit
        shell.call('%s commit -m "Initial commit" > /dev/null 2>&1' % GIT,
                   git_dir, logfile=logfile)
    except Exception:
        # was a bare 'except:', which would also swallow
        # KeyboardInterrupt/SystemExit; only ignore real errors
        pass
def add_remote(git_dir, name, url, logfile=None):
    '''
    Add a remote to a git repository

    @param git_dir: destination path of the git repository
    @type git_dir: str
    @param name: name of the remote
    @type name: str
    @param url: url of the remote
    @type url: str
    '''
    try:
        shell.new_call([GIT, 'remote', 'add', name, url], git_dir,
                       logfile=logfile)
    except Exception:
        # 'remote add' fails when the remote already exists; update its
        # url instead.  Was a bare 'except:', which would also swallow
        # KeyboardInterrupt/SystemExit.
        shell.new_call([GIT, 'remote', 'set-url', name, url], git_dir,
                       logfile=logfile)
def create_bundle(self, tmp=None):
    '''
    Creates the Application bundle structure

    Contents/MacOS/MainExectuable -> Contents/Home/bin/main-executable
    Contents/Info.plist

    @param tmp: directory to build the bundle in; a new temporary
                directory is created when not given
    @return: the directory containing the bundle
    '''
    tmp = tmp or tempfile.mkdtemp()

    contents = os.path.join(tmp, 'Contents')
    macos = os.path.join(contents, 'MacOS')
    resources = os.path.join(contents, 'Resources')
    for p in [contents, macos, resources]:
        if not os.path.exists(p):
            os.makedirs(p)

    # Create Contents/Info.plist
    # Use the template if provided in the package
    plist_tpl = None
    if os.path.exists(self.package.resources_info_plist):
        plist_tpl = open(self.package.resources_info_plist).read()
    framework_plist = ApplicationPlist(
        self.package.app_name, self.package.org, self.package.version,
        self.package.shortdesc, self.package.config.min_osx_sdk_version,
        os.path.basename(self.package.resources_icon_icns), plist_tpl)
    framework_plist.save(os.path.join(contents, 'Info.plist'))

    # Copy app icon to Resources
    shutil.copy(self.package.resources_icon_icns, resources)

    # Link or create a wrapper for the executables in Contents/MacOS
    for name, path, use_wrapper, wrapper in self.package.get_commands():
        filename = os.path.join(macos, name)
        if use_wrapper:
            # a falsy wrapper means this command should be skipped
            wrapper = self.package.get_wrapper(path, wrapper)
            if not wrapper:
                continue
            with open(filename, 'w') as f:
                f.write(wrapper)
            shell.new_call(['chmod', '+x', filename])
        else:
            # FIXME: We need to copy the binary instead of linking, because
            # being at a different path, @executable_path will be different
            # and we would need to set a different relative path with
            # install_name_tool
            shutil.copy(os.path.join(contents, 'Home', path), filename)
    return tmp
def run(self, config, args):
    """Entry point of the dependency-graph command.

    Validates that exactly one mode (recipe / package / package-recipes)
    was selected, generates the graph in dot format and renders it to
    an SVG file with graphviz.
    """
    selected_modes = args.recipe + args.package + args.package_recipes
    if selected_modes == 0:
        m.error(
            'Error: You need to specify either recipe, package or package-recipes '
            'mode to generate the dependency graph')
        return
    if selected_modes > 1:
        m.error(
            'Error: You can only specify recipe, package or package-recipes but not more than one'
        )
        return
    if not shutil.which('dot'):
        m.error(
            'Error: dot command not found. Please install graphviz it using '
            'your package manager. e.g. apt/dnf/brew install graphviz')
        return

    if args.recipe:
        self.graph_type, label = GraphType.RECIPE, 'recipe'
    elif args.package:
        self.graph_type, label = GraphType.PACKAGE, 'package'
    else:
        self.graph_type, label = (GraphType.PACKAGE_RECIPES,
                                  'package\'s recipes')

    # recipe-based graphs need the cookbook, package-based ones the store
    if self.graph_type in (GraphType.RECIPE, GraphType.PACKAGE_RECIPES):
        self.cookbook = CookBook(config)
    if self.graph_type in (GraphType.PACKAGE, GraphType.PACKAGE_RECIPES):
        self.package_store = PackagesStore(config)

    name = args.name[0]
    output = args.output[0] if args.output else name + '.svg'

    dot = 'digraph {{\n\tlabel="{} {}";\n{}}}\n'.format(
        name, label, self._dot_gen(name, self.graph_type))
    tmp = tempfile.NamedTemporaryFile()
    with open(tmp.name, 'w') as f:
        f.write(dot)
    shell.new_call(['dot', '-Tsvg', tmp.name, '-o', output])

    m.message("Dependency graph for %s generated at %s" % (name, output))
def _create_packages_dmg(self):
    """Bundle all runtime packages into a single disk image."""
    paths = list(self.packages_paths[PackageType.RUNTIME].values())
    dmg_file = os.path.join(self.output_dir,
                            self._package_name('-packages.dmg'))
    m.action(_("Creating image %s ") % dmg_file)

    # create a temporary directory to store packages
    workdir = os.path.join(self.tmp, "hdidir")
    os.makedirs(workdir)
    try:
        for pkg in paths:
            shutil.copy(pkg, workdir)
        # Create Disk Image
        shell.new_call(['hdiutil', 'create', dmg_file, '-ov',
                        '-srcfolder', workdir])
    finally:
        shutil.rmtree(workdir)
def start(self, jobs=0):
    """Install the distro packages required for bootstrapping.

    Runs the registered pre-checks, extends the package list with any
    platform/distro specific extras from the config, and invokes the
    system package manager.
    """
    for check in self.checks:
        check()

    if self.config.distro_packages_install:
        extra_packages = self.config.extra_bootstrap_packages.get(
            self.config.platform, None)
        if extra_packages:
            self.packages += extra_packages.get(self.config.distro, [])
            self.packages += extra_packages.get(
                self.config.distro_version, [])
        # Copy before extending: the original 'tool = self.tool' aliased
        # the list, so 'tool += ...' appended the yes/command arguments
        # to self.tool itself on every call.
        tool = list(self.tool)
        if self.assume_yes:
            tool += self.yes_arg
        tool += self.command
        cmd = tool + self.packages
        m.message("Running command '%s'" % ' '.join(cmd))
        shell.new_call(cmd)
def clean(git_dir, logfile=None):
    '''
    Clean a git repository with clean -dfx

    @param git_dir: path of the git repository
    @type git_dir: str
    '''
    # -dfx also removes untracked directories and ignored files
    return shell.new_call([GIT, 'clean', '-dfx'], git_dir, logfile=logfile)
def _write_tar(self, filename, package_prefix, files):
    """Create a compressed tar archive at ``filename``.

    Paths are archived relative to ``self.prefix``; when
    ``package_prefix`` is set, entries are prefixed with it inside the
    archive.  Compression is selected by ``self.compress`` ('bz2' or
    'xz'); any other value raises AssertionError.

    On a tar failure the partial output is renamed to
    ``filename + '.partial'`` and the FatalError is re-raised.
    """
    tar_cmd = ['tar', '-C', self.prefix, '-cf', filename]
    if package_prefix:
        tar_cmd += ['--transform=s|^|{}/|'.format(package_prefix)]
    if self.compress == 'bz2':
        # Use lbzip2 when available for parallel compression
        if shutil.which('lbzip2'):
            tar_cmd += ['--use-compress-program=lbzip2']
        else:
            tar_cmd += ['--bzip2']
    elif self.compress == 'xz':
        tar_cmd += ['--use-compress-program=xz --threads=0']
    else:
        # Previously fell through silently and produced an uncompressed
        # tarball; fail loudly like the other _write_tar variants do.
        raise AssertionError("Unknown tar compression: {}".format(
            self.compress))
    try:
        shell.new_call(tar_cmd + files)
    except FatalError:
        # tar may fail before creating any output file; only rename when
        # it exists so a FileNotFoundError from os.replace() does not
        # mask the original error.
        if os.path.exists(filename):
            os.replace(filename, filename + '.partial')
        raise
def _create_dmg(self, pkg_path, dmg_file):
    """Create a disk image containing the .pkg and the user resources.

    Returns the path of the generated dmg file.
    """
    # Create a new folder with the pkg and the user resources
    dmg_dir = os.path.join(self.tmp, 'dmg')
    os.makedirs(dmg_dir)
    for resource in self.package.user_resources:
        src = os.path.join(self.config.prefix, resource)
        dst = os.path.join(dmg_dir, os.path.split(src)[1])
        shell.copy_dir(src, dst)
    shutil.move(pkg_path, dmg_dir)

    # Create Disk Image
    shell.new_call(['hdiutil', 'create', dmg_file,
                    '-volname', self.package.name,
                    '-ov', '-srcfolder', dmg_dir])
    return dmg_file
def change_libs_path(self, object_file):
    """Rewrite absolute library references in ``object_file`` to @rpath.

    Adds rpath entries derived from the object's depth under
    ``self.root`` and changes every dependency located under
    ``self.lib_prefix`` to its @rpath-relative name.  Files whose name
    does not end in 'so' or 'dylib' are left untouched.
    """
    if not object_file.endswith(('so', 'dylib')):
        return
    # number of path components between self.root and the object file
    depth = len(object_file.split('/')) - len(self.root.split('/')) - 1
    up = '/..' * depth
    rpaths = [
        '.',
        '@loader_path' + up,
        '@executable_path' + up,
        '@loader_path/../lib',
        '@executable_path/../lib',
    ]
    if depth > 1:
        rpaths += ['@loader_path/..', '@executable_path/..']
    for rpath in rpaths:
        shell.new_call([INT_CMD, '-add_rpath', rpath, object_file],
                       fail=False)
    for lib in self.list_shared_libraries(object_file):
        if self.lib_prefix not in lib:
            continue
        shell.new_call(
            [INT_CMD, '-change', lib,
             lib.replace(self.lib_prefix, '@rpath'), object_file],
            fail=False, logfile=self.logfile)