def _create_packages(self):
    """Create the runtime and devel packages for every sub-package.

    Each sub-package is packed with OSXPackage; the resulting
    (runtime, devel) paths are recorded in self.packages_paths and
    packages that produced no files are tracked in self.empty_packages.
    """
    for p in self.packages:
        m.action(_("Creating package %s ") % p)
        packager = OSXPackage(self.config, p, self.store)
        try:
            paths = packager.pack(
                self.output_dir,
                self.devel,
                self.force,
                self.keep_temp,
                self.package.version,
                install_dir=self.package.get_install_dir(),
                include_dirs=self.include_dirs,
                sdk_version=self.package.sdk_version,
            )
            # Fix typo in the user-visible message ("sucessfully")
            m.action(_("Package created successfully"))
        except EmptyPackageError:
            # Packager found no files: mark both variants as empty below
            paths = [None, None]
        if paths[0] is not None:
            self.packages_paths[PackageType.RUNTIME][p] = paths[0]
        else:
            self.empty_packages[PackageType.RUNTIME].append(p)
        if paths[1] is not None:
            self.packages_paths[PackageType.DEVEL][p] = paths[1]
        else:
            self.empty_packages[PackageType.DEVEL].append(p)
def run(self, config, args):
    """Build the requested package (and optionally its dependencies).

    Raises PackageNotFoundError when the package does not exist and
    UsageError for conflicting command-line flags.
    """
    self.store = PackagesStore(config)
    p = self.store.get_package(args.package[0])
    # Bug fix: validate the package exists *before* it is used.
    # Previously p could be None when passed to _build_deps(), crashing
    # with an AttributeError instead of the intended error.
    if p is None:
        raise PackageNotFoundError(args.package[0])
    if args.skip_deps_build and args.only_build_deps:
        raise UsageError(_("Cannot use --skip-deps-build together with "
                           "--only-build-deps"))
    if not args.skip_deps_build:
        self._build_deps(config, p, args.no_devel)
    if args.only_build_deps:
        # caller only wanted the dependencies built
        return
    if args.tarball:
        pkg = DistTarball(config, p, self.store)
    else:
        pkg = Packager(config, p, self.store)
    m.action(_("Creating package for %s") % p.name)
    if args.tarball:
        paths = pkg.pack(os.path.abspath(args.output_dir), args.no_devel,
                         args.force, args.keep_temp,
                         split=not args.no_split)
    else:
        paths = pkg.pack(os.path.abspath(args.output_dir), args.no_devel,
                         args.force, args.keep_temp)
    if None in paths:
        paths.remove(None)
    p.post_install(paths)
    m.action(_("Package successfully created in %s") %
             ' '.join([os.path.abspath(x) for x in paths]))
def install_python_sdk(self):
    """Clone the windows-external-sdk repository and install the Python
    2.7 headers and import libraries into the prefix."""
    ### FIXME : MOVE OVER REPOSITORY TO STANDARD ROOT
    old_sdk_git_root = 'git://anongit.freedesktop.org/gstreamer-sdk'
    m.action(_("Installing Python headers"))
    tmp_dir = tempfile.mkdtemp()
    shell.call("git clone %s" % os.path.join(old_sdk_git_root,
                                             'windows-external-sdk.git'),
               tmp_dir)
    python_headers = os.path.join(self.prefix, 'include', 'Python2.7')
    python_headers = to_unixpath(os.path.abspath(python_headers))
    shell.call('mkdir -p %s' % python_headers)
    python_libs = os.path.join(self.prefix, 'lib')
    python_libs = to_unixpath(python_libs)
    temp = to_unixpath(os.path.abspath(tmp_dir))
    shell.call('cp -f %s/windows-external-sdk/python27/%s/include/* %s' %
               (temp, self.version, python_headers))
    shell.call('cp -f %s/windows-external-sdk/python27/%s/lib/* %s' %
               (temp, self.version, python_libs))
    try:
        # remove a stale symlink/file before recreating it below
        os.remove('%s/lib/python.dll' % self.prefix)
    # Bug fix: catch only OSError (file absent/not removable) instead of
    # a bare 'except:' that also swallows KeyboardInterrupt/SystemExit.
    except OSError:
        pass
    shell.call('ln -s python27.dll python.dll', '%s/lib' % self.prefix)
    shutil.rmtree(tmp_dir)
def bundle(self):
    """Run the linux bundling pipeline: prepare the install dir,
    generate the AppImage tarball and md5, then clean up.

    When devel is not wanted the bundle only contains the files needed
    to execute.
    """
    stages = [
        ("prepare-install-dir", [
            (_("Copy install path"), self._copy_installdir, True),
            (_("Installing bundle files"),
             self._install_bundle_specific_files, True),
            (_("Make all paths relatives"),
             self._make_paths_relative, True),
        ]),
        ("generate-tarball", [
            (_("Running AppImageAssistant"), self._generate_bundle, True),
            (_("Generating md5"), self._generate_md5sum, True),
        ]),
        ("clean-install-dir", [
            (_("Clean tmp dirs"), self._clean_tmps, not self.keep_temp),
        ]),
    ]
    for stage_name, substeps in stages:
        # one logfile per stage
        shell.set_logfile_output("%s/%s-bundle-%s.log" %
                                 (self.config.logs, self.package.name,
                                  stage_name))
        for desc, func, wanted in substeps:
            m.build_step('1', '1', self.package.name + " linux bundle",
                         desc)
            if wanted is True:
                func()
            else:
                m.action(_("Step not wanted"))
        shell.close_logfile_output()
def _create_package(self, config, p, args):
    """Pack package `p`, write a .sha1 checksum next to each produced
    file and return the list of output paths."""
    if args.type == 'native':
        pkg = Packager(config, p, self.store)
    else:
        pkg = DistArchive(config, p, self.store, args.type)
    m.action(_("Creating package for %s") % p.name)
    p.pre_package()
    paths = pkg.pack(os.path.abspath(args.output_dir), not args.no_devel,
                     args.force, args.keep_temp)
    if None in paths:
        paths.remove(None)
    if '' in paths:
        paths.remove('')
    # post_package may rewrite the path list; fall back to the original
    paths = p.post_package(paths) or paths
    BUF_SIZE = 65536  # hash in 64kb chunks to bound memory usage
    # Bug fix: the loop variable used to be named `p`, shadowing the
    # package argument; renamed to `path`.
    for path in paths:
        sha1 = hashlib.sha1()
        with open(os.path.abspath(path), 'rb') as f:
            while True:
                data = f.read(BUF_SIZE)
                if not data:
                    break
                sha1.update(data)
        sha1sum = sha1.hexdigest()
        m.action(_("Package successfully created in %s %s") %
                 (os.path.abspath(path), sha1sum))
        # Generate the sha1sum file
        with open('%s.sha1' % path, 'w+') as sha1file:
            sha1file.write(sha1sum)
    return paths
def _cook_recipe(self, recipe, count, total):
    """Build one recipe, running each of its pending steps and updating
    the cookbook's step/build status as they complete."""
    if not self.cookbook.recipe_needs_build(recipe.name) and \
            not self.force:
        m.build_step(count, total, recipe.name, _("already built"))
        return
    tmp = None
    if self.missing_files:
        # temp file whose mtime marks "now"; used later to spot files
        # created by the build
        tmp = tempfile.NamedTemporaryFile()
    recipe.force = self.force
    for desc, step in recipe.steps:
        m.build_step(count, total, recipe.name, step)
        # skip steps already marked done unless a rebuild is forced
        if self.cookbook.step_done(recipe.name, step) and not self.force:
            m.action(_("Step done"))
            continue
        try:
            # resolve and invoke the step's method on the recipe
            stepfunc = getattr(recipe, step)
            if not stepfunc:
                raise FatalError(_('Step %s not found') % step)
            stepfunc()
            self.cookbook.update_step_status(recipe.name, step)
        except FatalError:
            self._handle_build_step_error(recipe, step)
        except Exception:
            raise BuildStepError(recipe, step, traceback.format_exc())
    self.cookbook.update_build_status(recipe.name, recipe.built_version())
    if self.missing_files:
        self._print_missing_files(recipe, tmp)
def _cook_recipe(self, recipe, count, total):
    """Build one recipe, logging each step to its own logfile and
    updating the cookbook's step status as steps complete."""
    if not self.cookbook.recipe_needs_build(recipe.name) and \
            not self.force:
        m.build_step(count, total, recipe.name, _("already built"))
        return
    if self.missing_files:
        # create a temp file that will be used to find newer files
        tmp = tempfile.NamedTemporaryFile()
    recipe.force = self.force
    for desc, step in recipe.steps:
        m.build_step(count, total, recipe.name, step)
        # check if the current step needs to be done
        if self.cookbook.step_done(recipe.name, step) and not self.force:
            m.action(_("Step done"))
            continue
        try:
            # call step function
            stepfunc = getattr(recipe, step)
            if not stepfunc:
                raise FatalError(_('Step %s not found') % step)
            shell.set_logfile_output("%s/%s-%s.log" %
                                     (recipe.config.logs, recipe, step))
            stepfunc()
            # update status successfully
            self.cookbook.update_step_status(recipe.name, step)
            shell.close_logfile_output()
        # Bug fix: legacy 'except FatalError, e' comma syntax is
        # Python 2 only (SyntaxError on Python 3); use 'as e', which
        # works on Python 2.6+ and 3.
        except FatalError as e:
            shell.close_logfile_output(dump=True)
            self._handle_build_step_error(recipe, step, e.arch)
        except Exception:
            shell.close_logfile_output(dump=True)
            raise BuildStepError(recipe, step, traceback.format_exc())
def configure(self):
    """Prepare and run the autotools configure step for this recipe.

    Adds maintainer/silent-rules flags for real autotools projects,
    refreshes config.guess/config.sub, forwards cache variables on
    Windows and host/build/target triplets, then delegates to
    MakefilesBase.configure.
    """
    if self.supports_non_src_build:
        self.config_sh = os.path.join(self.repo_dir, self.config_sh)
    # Only use --disable-maintainer mode for real autotools based projects
    if os.path.exists(os.path.join(self.make_dir, 'configure.in')) or \
            os.path.exists(os.path.join(self.make_dir, 'configure.ac')):
        self.configure_tpl += " --disable-maintainer-mode "
        self.configure_tpl += " --disable-silent-rules "
    if self.autoreconf:
        shell.call(self.autoreconf_sh, self.make_dir)
    # Replace the tree's outdated config.guess/config.sub with ours.
    # Bug fix: filter empty entries instead of files.remove(''), which
    # raises ValueError when find's output has no trailing newline.
    # (Also dedupes the two previously copy-pasted loops.)
    for name in ('config.guess', 'config.sub'):
        out = shell.check_call('find %s -type f -name %s' %
                               (self.make_dir, name))
        o = os.path.join(self.config._relative_path('data'), 'autotools',
                         name)
        for f in [x for x in out.split('\n') if x]:
            m.action("copying %s to %s" % (o, f))
            shutil.copy(o, f)
    if self.config.platform == Platform.WINDOWS and \
            self.supports_cache_variables:
        # On windows, environment variables are upperscase, but we still
        # need to pass things like am_cv_python_platform in lowercase for
        # configure and autogen.sh
        # items() works on Python 2 and 3 (iteritems() is Python 2 only)
        for k, v in os.environ.items():
            if k[2:6] == '_cv_':
                self.configure_tpl += ' %s="%s"' % (k, v)
    if self.add_host_build_target:
        if self.config.host is not None:
            self.configure_tpl += ' --host=%(host)s'
        if self.config.build is not None:
            self.configure_tpl += ' --build=%(build)s'
        if self.config.target is not None:
            self.configure_tpl += ' --target=%(target)s'
    use_configure_cache = self.config.use_configure_cache
    if self.use_system_libs and self.config.allow_system_libs:
        use_configure_cache = False
    if self.new_env or self.append_env:
        # a changed environment invalidates cached configure results
        use_configure_cache = False
    if use_configure_cache and self.can_use_configure_cache:
        cache = os.path.join(self.config.sources, '.configure.cache')
        self.config_sh += ' --cache-file=%s' % cache
    MakefilesBase.configure(self)
def install_python_sdk(self):
    """Clone the windows-external-sdk repository from the configured git
    root and install the Python 2.7 headers and libraries."""
    m.action(_("Installing Python headers"))
    tmp_dir = tempfile.mkdtemp()
    shell.call("git clone %s" % os.path.join(self.config.git_root,
                                             'windows-external-sdk.git'),
               tmp_dir)
    python_headers = os.path.join(self.prefix, 'include', 'Python2.7')
    python_headers = to_unixpath(os.path.abspath(python_headers))
    shell.call('mkdir -p %s' % python_headers)
    python_libs = os.path.join(self.prefix, 'lib')
    python_libs = to_unixpath(python_libs)
    temp = to_unixpath(os.path.abspath(tmp_dir))
    shell.call('cp -f %s/windows-external-sdk/python27/%s/include/* %s' %
               (temp, self.version, python_headers))
    shell.call('cp -f %s/windows-external-sdk/python27/%s/lib/* %s' %
               (temp, self.version, python_libs))
    pydll = '%s/lib/python.dll' % self.prefix
    try:
        # remove a stale symlink/file before recreating it below
        os.remove(pydll)
    # Bug fix: catch only OSError (file absent/not removable) instead of
    # a bare 'except:' that also swallows KeyboardInterrupt/SystemExit.
    except OSError:
        pass
    shell.call('ln -s python27.dll %s' % (pydll))
    shutil.rmtree(tmp_dir)
def _create_framework_bundle_packager(self):
    """Return a packager for the GStreamer iOS framework bundle."""
    m.action(_("Creating framework package"))
    desc = 'GStreamer iOS Framework Bundle Version %s' % \
        (self.package.version)
    return FrameworkBundlePackager(
        self.package, 'ios-framework', 'GStreamer', desc,
        '3ffe67c2-3421-411f-8287-e8faa892f853')
def _create_product(self):
    """Build the application .pkg and wrap it in a product archive;
    returns the absolute path of the final package."""
    packagebuild = PackageBuild()
    resources = self._copy_scripts()
    app_pkg_name = self._package_name(".pkg")
    app_pkg = os.path.join(self.tmp, app_pkg_name)
    packagebuild.create_package(
        self.approot,
        self.package.identifier(),
        self.package.version,
        self.package.shortdesc,
        app_pkg,
        "/Applications",
        scripts_path=resources,
    )
    self.package.packages = [(self.package.name, True, True)]
    # Bug fix: interpolate *after* the gettext lookup; formatting inside
    # _() produces a dynamic msgid that can never match the catalog.
    m.action(_("Creating Distribution.xml for package %s ") % self.package)
    distro = DistributionXML(
        self.package,
        self.store,
        self.tmp,
        {self.package: app_pkg_name},
        self.store.get_package_deps(self.package),
        PackageType.RUNTIME,
        self.config.target_arch,
        home_folder=False,
    )
    # NOTE(review): NamedTemporaryFile().name is racy (the file is
    # deleted once the object is collected) — consider tempfile.mkstemp;
    # kept as-is to preserve behavior.
    distro_path = tempfile.NamedTemporaryFile().name
    distro.write(distro_path)
    output_file = os.path.join(self.output_dir, self._package_name(".pkg"))
    output_file = os.path.abspath(output_file)
    pb = ProductBuild()
    pb.create_package(distro_path, output_file,
                      [self.package.relative_path("."), self.tmp])
    return output_file
def install_gl_headers(self):
    """Download wglext.h into the mingw GL include dir for this arch."""
    m.action("Installing wglext.h")
    if self.arch == Architecture.X86:
        triplet = 'i686-w64-mingw32'
    else:
        triplet = 'x86_64-w64-mingw32'
    inst_path = os.path.join(self.prefix,
                             '%s/include/GL/wglext.h' % triplet)
    gl_header = 'http://www.opengl.org/registry/api/GL/wglext.h'
    shell.download(gl_header, inst_path)
def extract(self):
    """Unpack the downloaded tarball into the build dir and apply the
    recipe patches."""
    m.action(_('Extracting tarball to %s') % self.build_dir)
    # start from a clean build dir
    if os.path.exists(self.build_dir):
        shutil.rmtree(self.build_dir)
    shell.unpack(self.download_path, self.config.sources)
    if self.tarball_dirname is not None:
        # the tarball extracts into a differently named dir: rename it
        extracted = os.path.join(self.config.sources,
                                 self.tarball_dirname)
        os.rename(extracted, self.build_dir)
    self.apply_patches()
def fetch(self):
    """Fetch the tarball: copy file:// urls locally, download the rest."""
    m.action(_('Fetching tarball %s to %s') % (self.url,
                                               self.download_path))
    if not os.path.exists(self.repo_dir):
        os.makedirs(self.repo_dir)
    if self.url.startswith('file://'):
        # strip the 7-char 'file://' scheme and copy the local file
        shutil.copy(self.url[7:], self.download_path)
    else:
        # NOTE(review): certificate checking is disabled here — confirm
        # this is intended for the mirrors in use.
        shell.download(self.url, self.download_path, check_cert=False)
def wipe(self, paths):
    """Remove every path in `paths`; missing paths are skipped and
    read-only files are made writable before removal."""
    for target in paths:
        m.action(_("Removing path: %s") % target)
        if not os.path.exists(target):
            continue
        if not os.path.isfile(target):
            # directory: recursive removal, _onerror fixes permissions
            shutil.rmtree(target, onerror=_onerror)
            continue
        # plain file: ensure it is deletable, then remove it
        if not os.access(target, os.W_OK):
            os.chmod(target, stat.S_IWUSR)
        os.remove(target)
def build(self, output_dir, tarname, tmpdir, packagedir, srcdir):
    """Build the debian package with dpkg-buildpackage.

    Extracts the dependency tarball, assembles debian/shlibs.local from
    the dependencies' generated shlibs, runs the build, publishes this
    package's shlibs for dependents, and moves the resulting .deb files
    to output_dir. Returns the list of output paths.
    """
    if tarname:
        tar = tarfile.open(tarname, 'r:bz2')
        try:
            tar.extractall(tmpdir)
        finally:
            # Bug fix: guarantee the tarfile is closed even on error
            tar.close()
    if not isinstance(self.package, MetaPackage):
        # for each dependency, copy the generated shlibs to this
        # package debian/shlibs.local, so that dpkg-shlibdeps knows where
        # our dependencies are without using Build-Depends:
        package_deps = self.store.get_package_deps(self.package.name,
                                                   recursive=True)
        if package_deps:
            shlibs_local_path = os.path.join(packagedir, 'shlibs.local')
            # Bug fix: the files opened here were never closed; use
            # context managers for both ends of the copy.
            with open(shlibs_local_path, 'w') as f:
                for p in package_deps:
                    package_shlibs_path = os.path.join(
                        tmpdir, self.package_prefix + p.name + '-shlibs')
                    m.action(_('Copying generated shlibs file %s for '
                               'dependency %s to %s') %
                             (package_shlibs_path, p.name,
                              shlibs_local_path))
                    if os.path.exists(package_shlibs_path):
                        with open(package_shlibs_path, 'r') as src:
                            shutil.copyfileobj(src, f)
    shell.call('dpkg-buildpackage -rfakeroot -us -uc -D -b', srcdir)
    # we may only have a generated shlibs file if at least we have
    # runtime files
    if tarname:
        # copy generated shlibs to tmpdir/$package-shlibs to be used by
        # dependent packages
        shlibs_path = os.path.join(packagedir,
                                   self.package_prefix + self.package.name,
                                   'DEBIAN', 'shlibs')
        out_shlibs_path = os.path.join(
            tmpdir, self.package_prefix + self.package.name + '-shlibs')
        m.action(_('Copying generated shlibs file %s to %s') %
                 (shlibs_path, out_shlibs_path))
        if os.path.exists(shlibs_path):
            shutil.copy(shlibs_path, out_shlibs_path)
    # copy the newly created package, which should be in tmpdir
    # to the output dir
    paths = []
    for f in os.listdir(tmpdir):
        if fnmatch(f, '*.deb'):
            out_path = os.path.join(output_dir, f)
            if os.path.exists(out_path):
                os.remove(out_path)
            paths.append(out_path)
            shutil.move(os.path.join(tmpdir, f), output_dir)
    return paths
def create_tree(self, tmpdir):
    """Create the debian source tree layout under `tmpdir` (a fresh
    temp dir when None) and return (tmpdir, packagedir, srcdir)."""
    if tmpdir is None:
        # create a tmp dir to use as topdir
        tmpdir = tempfile.mkdtemp()
    srcdir = os.path.join(tmpdir, self.full_package_name)
    packagedir = os.path.join(srcdir, 'debian')
    os.mkdir(srcdir)
    os.mkdir(packagedir)
    os.mkdir(os.path.join(packagedir, 'source'))
    m.action(_('Creating debian package structure at %s for package %s') %
             (srcdir, self.package.name))
    return (tmpdir, packagedir, srcdir)
def extract(self):
    """Unpack the tarball into the sources dir and apply all patches."""
    m.action(_('Extracting tarball to %s') % self.build_dir)
    shell.unpack(self.download_path, self.config.sources)
    if self.tarball_dirname is not None:
        # the tarball's top-level dir differs from build_dir: drop any
        # stale build_dir and move the extracted tree into place
        if os.path.exists(self.build_dir):
            shutil.rmtree(self.build_dir)
        os.rename(os.path.join(self.config.sources,
                               self.tarball_dirname),
                  self.build_dir)
    for patch in self.patches:
        # resolve recipe-relative patch paths
        patch_path = patch if os.path.isabs(patch) \
            else self.relative_path(patch)
        shell.apply_patch(patch_path, self.build_dir, self.strip)
def _create_pmdoc(self, package_type):
    """Create the PackageMaker pmdoc for `package_type` and build the
    final .pkg from it; returns the output file path."""
    self.package.set_mode(package_type)
    # Bug fix: interpolate *after* the gettext lookup; formatting inside
    # _() produces a dynamic msgid that can never match the catalog.
    m.action(_("Creating pmdoc for package %s ") % self.package)
    pmdoc = PMDoc(self.package, self.store, self.tmp,
                  self.packages_paths[package_type],
                  self.empty_packages[package_type], package_type)
    pmdoc_path = pmdoc.create()
    output_file = os.path.join(self.output_dir, self._package_name('.pkg'))
    output_file = os.path.abspath(output_file)
    pm = PackageMaker()
    pm.create_package_from_pmdoc(pmdoc_path, output_file)
    return output_file
def _create_framework_bundle_package(self):
    """Create the OS X framework bundle package, register it in the
    store and record its runtime path (it has no devel content)."""
    m.action(_("Creating framework package"))
    packager = FrameworkBundlePackager(
        self.package, 'osx-framework', 'Framework Bundle',
        '3ffe67c2-4565-411f-8287-e8faa892f853')
    package = packager.package
    self.store.add_package(package)
    # append the bundle to this package's sub-package list
    self.package.packages = self.package.packages[:] + \
        [(package.name, True, True)]
    path = packager.pack(self.output_dir)[0]
    self.packages_paths[PackageType.RUNTIME][package] = path
    self.empty_packages[PackageType.DEVEL].append(package)
def run(self, config, args):
    """Create a new .recipe file from the command-line arguments.

    The template is extended section by section (licenses, commit,
    origin, deps) and written to the recipes directory. Raises
    UsageError for unknown dependencies and FatalError on I/O errors.
    """
    name = args.name[0]
    version = args.version[0]
    filename = os.path.join(config.recipes_dir, '%s.recipe' % name)
    if not args.force and os.path.exists(filename):
        m.warning(_("Recipe '%s' (%s) already exists, "
                    "use -f to replace" % (name, filename)))
        return
    template_args = {}
    template = RECEIPT_TPL
    template_args['name'] = name
    template_args['version'] = version
    if args.licenses:
        licenses = args.licenses.split(',')
        self.validate_licenses(licenses)
        template += LICENSES_TPL
        template_args['licenses'] = ', '.join(
            ['License.' + self.supported_licenses[l] for l in licenses])
    if args.commit:
        template += COMMIT_TPL
        template_args['commit'] = args.commit
    if args.origin:
        template += ORIGIN_TPL
        template_args['origin'] = args.origin
    if args.deps:
        template += DEPS_TPL
        deps = args.deps.split(',')
        cookbook = CookBook(config)
        for dname in deps:
            # only verify the dependency exists; the recipe object
            # itself was never used
            try:
                cookbook.get_recipe(dname)
            except RecipeNotFoundError:
                raise UsageError(_("Error creating recipe: "
                                   "dependant recipe %s does not exist")
                                 % dname)
        template_args['deps'] = deps
    try:
        # Bug fix: use a context manager so the file is closed even if
        # the template rendering or write fails.
        with open(filename, 'w') as f:
            f.write(template % template_args)
        m.action(_("Recipe '%s' successfully created in %s") %
                 (name, filename))
    except IOError as ex:
        raise FatalError(_("Error creating recipe: %s") % ex)
def pack_deps(self, output_dir, tmpdir, force):
    """Pack every dependency of this package, skipping those already
    built (stamp file present) and recording the empty ones."""
    for dep in self.store.get_package_deps(self.package.name):
        stamp_path = os.path.join(tmpdir, dep.name + "-stamp")
        if os.path.exists(stamp_path):
            # already built, skipping
            continue
        m.action(_("Packing dependency %s for package %s") %
                 (dep.name, self.package.name))
        dep_packager = self.__class__(self.config, dep, self.store)
        try:
            dep_packager.pack(output_dir, self.devel, force, True, True,
                              tmpdir)
        except EmptyPackageError:
            self._empty_packages.append(dep)
def _create_product(self, package_type):
    """Write the Distribution.xml for `package_type` and build the
    final .pkg with productbuild; returns the output file path."""
    self.package.set_mode(package_type)
    # Bug fix: interpolate *after* the gettext lookup; formatting inside
    # _() produces a dynamic msgid that can never match the catalog.
    m.action(_("Creating Distribution.xml for package %s ") % self.package)
    distro = DistributionXML(self.package, self.store, self.tmp,
                             self.packages_paths[package_type],
                             self.empty_packages[package_type],
                             package_type,
                             self.config.target_arch,
                             home_folder=self.home_folder)
    # NOTE(review): NamedTemporaryFile().name is racy — consider
    # tempfile.mkstemp; kept as-is to preserve behavior.
    distro_path = tempfile.NamedTemporaryFile().name
    distro.write(distro_path)
    output_file = os.path.join(self.output_dir, self._package_name('.pkg'))
    output_file = os.path.abspath(output_file)
    pb = ProductBuild()
    pb.create_package(distro_path, output_file)
    return output_file
def fetch(self):
    """Check out the svn repository, preferring a cached local copy."""
    if os.path.exists(self.repo_dir):
        shutil.rmtree(self.repo_dir)
    cached_dir = os.path.join(self.config.cached_sources,
                              self.package_name)
    use_cache = os.path.isdir(os.path.join(cached_dir, ".svn"))
    if use_cache:
        # reuse the local mirror instead of hitting the network
        m.action(_('Copying cached repo from %s to %s instead of %s') %
                 (cached_dir, self.repo_dir, self.url))
        shell.copy_dir(cached_dir, self.repo_dir)
    else:
        os.makedirs(self.repo_dir)
        svn.checkout(self.url, self.repo_dir)
        svn.update(self.repo_dir, self.revision)
def fetch(self):
    """Fetch the tarball, preferring a cached local copy over a
    network download."""
    if not os.path.exists(self.repo_dir):
        os.makedirs(self.repo_dir)
    cached_file = os.path.join(self.config.cached_sources,
                               self.package_name, self.tarball_name)
    if os.path.isfile(cached_file):
        # local cache hit: copy instead of downloading
        m.action(_('Copying cached tarball from %s to %s instead of %s') %
                 (cached_file, self.download_path, self.url))
        shutil.copy(cached_file, self.download_path)
        return
    m.action(_('Fetching tarball %s to %s') % (self.url,
                                               self.download_path))
    # NOTE(review): certificate checking is disabled here — confirm
    # this is intended.
    shell.download(self.url, self.download_path, check_cert=False)
def _create_merge_modules(self, package_type):
    """Create a Merge Module for every dependency package.

    Empty packages are dropped with a warning; self.packagedeps is
    replaced by the {package: path} mapping and the module paths are
    recorded in self.merge_modules for this package_type.
    """
    packagedeps = {}
    for package in self.packagedeps:
        package.set_mode(package_type)
        m.action("Creating Merge Module for %s" % package)
        packager = MergeModulePackager(self.config, package, self.store)
        try:
            path = packager.create_merge_module(self.output_dir,
                                                package_type, self.force,
                                                self.package.version,
                                                self.keep_temp)
            packagedeps[package] = path
        except EmptyPackageError:
            m.warning("Package %s is empty" % package)
    self.packagedeps = packagedeps
    # Materialize the values: dict.values() is a lazy view on Python 3,
    # which would alias self.packagedeps instead of snapshotting it.
    self.merge_modules[package_type] = list(packagedeps.values())
def runargs(self, config, output_dir, filename, libraries):
    """Generate an .xcconfig file for the given libraries in output_dir.

    Raises UsageError when no library name is given; the output name
    defaults to the first library.
    """
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    if len(libraries) == 0:
        raise UsageError("You need to specify at least one library name")
    filename = filename or libraries[0]
    filepath = os.path.join(output_dir, '%s.xcconfig' % filename)
    xcconfig = XCConfig(libraries)
    xcconfig.create(filepath)
    m.action('Created %s.xcconfig' % filename)
    # Fix typo and grammar in the user-visible message
    # ("file were sucessfully")
    m.message('XCode config file was successfully created in %s' %
              os.path.abspath(filepath))
def runargs(self, config, output_dir, prefix=DEFAULT_PREFIX_MACRO):
    """Generate Visual Studio property sheets (.props) for every
    pkg-config module, plus the common sheet, in output_dir."""
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    for pc in PkgConfig.list_all():
        # replace the hard-coded prefix with the $(prefix) macro so the
        # sheets are relocatable
        p2v = PkgConfig2VSProps(pc, prefix=config.prefix,
                                inherit_common=True,
                                prefix_replacement='$(%s)' % prefix)
        p2v.create(output_dir)
        m.action('Created %s.props' % pc)
    common = CommonProps(prefix)
    common.create(output_dir)
    # Fix typo in the user-visible message ("sucessfully")
    m.message('Property sheets files were successfully created in %s' %
              os.path.abspath(output_dir))
def fetch(self):
    """Check out the svn repository, preferring the cached copy and
    refusing network access in offline mode."""
    if os.path.exists(self.repo_dir):
        shutil.rmtree(self.repo_dir)
    cached_dir = os.path.join(self.config.cached_sources,
                              self.package_name)
    if os.path.isdir(os.path.join(cached_dir, ".svn")):
        # reuse the local mirror instead of hitting the network
        m.action(_('Copying cached repo from %s to %s instead of %s') %
                 (cached_dir, self.repo_dir, self.url))
        shell.copy_dir(cached_dir, self.repo_dir)
        return
    os.makedirs(self.repo_dir)
    if self.offline:
        # no cache and no network allowed: give up
        raise FatalError('Offline mode: no cached svn repos found for {} at {!r}'
                         ''.format(self.package_name,
                                   self.config.cached_sources))
    svn.checkout(self.url, self.repo_dir)
    svn.update(self.repo_dir, self.revision)
def _create_packages_dmg(self):
    """Bundle all runtime packages into a single DMG disk image."""
    runtime_paths = self.packages_paths[PackageType.RUNTIME].values()
    dmg_file = os.path.join(self.output_dir,
                            self._package_name('-packages.dmg'))
    m.action(_("Creating image %s ") % dmg_file)
    # create a temporary directory to store packages
    workdir = os.path.join(self.tmp, "hdidir")
    os.makedirs(workdir)
    try:
        for pkg_path in runtime_paths:
            shutil.copy(pkg_path, workdir)
        # Create Disk Image
        shell.call('hdiutil create %s -ov -srcfolder %s' %
                   (dmg_file, workdir))
    finally:
        # always drop the staging dir, even if hdiutil fails
        shutil.rmtree(workdir)
def configure(self):
    """Prepare and run the autotools configure step for this recipe.

    Adds maintainer/silent-rules and introspection flags, refreshes
    config.guess/config.sub, forwards cache variables on Windows and
    host/build/target triplets, then delegates to
    MakefilesBase.configure.
    """
    # Only use --disable-maintainer mode for real autotools based projects
    if os.path.exists(os.path.join(self.config_src_dir, 'configure.in')) or \
            os.path.exists(os.path.join(self.config_src_dir,
                                        'configure.ac')):
        self.configure_tpl += " --disable-maintainer-mode "
        self.configure_tpl += " --disable-silent-rules "
    if self.config.variants.gi and not self.disable_introspection:
        self.configure_tpl += " --enable-introspection "
    else:
        self.configure_tpl += " --disable-introspection "
    if self.autoreconf:
        shell.call(self.autoreconf_sh, self.config_src_dir)
    # Replace the tree's outdated config.guess/config.sub with ours.
    # Bug fix: filter empty entries instead of files.remove(''), which
    # raises ValueError when find's output has no trailing newline.
    # (Also dedupes the two previously copy-pasted loops.)
    for name in ('config.guess', 'config.sub'):
        out = shell.check_call('find %s -type f -name %s' %
                               (self.config_src_dir, name))
        o = os.path.join(self.config._relative_path('data'), 'autotools',
                         name)
        for f in [x for x in out.split('\n') if x]:
            m.action("copying %s to %s" % (o, f))
            shutil.copy(o, f)
    if self.config.platform == Platform.WINDOWS and \
            self.supports_cache_variables:
        # On windows, environment variables are upperscase, but we still
        # need to pass things like am_cv_python_platform in lowercase for
        # configure and autogen.sh
        # items() works on Python 2 and 3 (iteritems() is Python 2 only)
        for k, v in os.environ.items():
            if k[2:6] == '_cv_':
                self.configure_tpl += ' %s="%s"' % (k, v)
    if self.add_host_build_target:
        if self.config.host is not None:
            self.configure_tpl += ' --host=%(host)s'
        if self.config.build is not None:
            self.configure_tpl += ' --build=%(build)s'
        if self.config.target is not None:
            self.configure_tpl += ' --target=%(target)s'
    use_configure_cache = self.config.use_configure_cache
    if self.use_system_libs and self.config.allow_system_libs:
        use_configure_cache = False
    if self.new_env or self.append_env:
        # a changed environment invalidates cached configure results
        use_configure_cache = False
    if use_configure_cache and self.can_use_configure_cache:
        cache = os.path.join(self.config.sources, '.configure.cache')
        self.configure_tpl += ' --cache-file=%s' % cache
    MakefilesBase.configure(self)
def _create_framework_bundle_packager(self):
    """Return the iOS framework bundle packager for this package."""
    m.action(_("Creating framework package"))
    return FrameworkBundlePackager(
        self.package, 'ios-framework', 'Framework Bundle',
        '3ffe67c2-3421-411f-8287-e8faa892f853')
platform_include_files) template_args['platform_files'] = platform_files if args.platform_files_devel or platform_include_files_devel: template += PLATFORM_FILES_DEVEL_TPL platform_files_devel = self.parse_platform_files( args.platform_files_devel, platform_include_files_devel) template_args['platform_files_devel'] = platform_files_devel try: f = open(filename, 'w') f.write(template % template_args) f.close() m.action( _("Package '%s' successfully created in %s") % (name, filename)) except IOError, ex: raise FatalError(_("Error creating package: %s") % ex) def merge_dict(self, d1, d2): ret = d1 for k, v in d2.iteritems(): if k in ret: ret[k].extend(v) else: ret[k] = v return ret def validate_licenses(self, licenses): for l in licenses:
def run(self, config, args):
    """Create a new .package file from the command-line arguments.

    The package template is extended section by section (vendor, url,
    license, deps, files...); included packages contribute their file
    lists. Raises UsageError for unknown or conflicting packages and
    FatalError on I/O errors.
    """
    name = args.name[0]
    version = args.version[0]
    store = PackagesStore(config)
    filename = os.path.join(config.packages_dir, '%s.package' % name)
    if not args.force and os.path.exists(filename):
        m.warning(
            _("Package '%s' (%s) already exists, "
              "use -f to replace" % (name, filename)))
        return
    template_args = {}
    template = RECEIPT_TPL
    template_args['name'] = name
    template_args['version'] = version
    if args.short_desc:
        template_args['shortdesc'] = args.short_desc
    else:
        # fall back to the package name as description
        template_args['shortdesc'] = name
    if args.vendor:
        template += VENDOR_TPL
        template_args['vendor'] = args.vendor
    if args.url:
        template += URL_TPL
        template_args['url'] = args.url
    if args.license:
        self.validate_licenses([args.license])
        template += LICENSE_TPL
        template_args['license'] = \
            'License.' + self.supported_licenses[args.license]
    deps = []
    if args.deps:
        template += DEPS_TPL
        deps = args.deps.split(',')
        for dname in deps:
            # only verify the dependency exists; the object is unused
            try:
                store.get_package(dname)
            except Exception:
                raise UsageError(
                    _("Error creating package: "
                      "dependant package %s does not exist") % dname)
        template_args['deps'] = deps
    include_files = []
    include_files_devel = []
    platform_include_files = {}
    platform_include_files_devel = {}
    if args.includes:
        includes = args.includes.split(',')
        if list(set(deps) & set(includes)):
            raise UsageError(
                _("Error creating package: "
                  "param --deps intersects with --includes"))
        for pname in includes:
            try:
                package = store.get_package(pname)
            except Exception:
                raise UsageError(
                    _("Error creating package: "
                      "included package %s does not exist") % pname)
            # merge the included package's file lists into ours
            include_files.extend(package.files)
            include_files_devel.extend(package.files_devel)
            platform_include_files = self.merge_dict(
                platform_include_files, package.platform_files)
            platform_include_files_devel = self.merge_dict(
                platform_include_files_devel,
                package.platform_files_devel)
    # de-duplicate the collected file lists
    include_files = list(set(include_files))
    include_files_devel = list(set(include_files_devel))
    if args.files or include_files:
        template += FILES_TPL
        files = []
        if args.files:
            files = args.files.split(',')
        if include_files:
            files.extend(include_files)
        template_args['files'] = files
    if args.files_devel or include_files_devel:
        template += FILES_DEVEL_TPL
        files_devel = []
        if args.files_devel:
            files_devel = args.files_devel.split(',')
        if include_files_devel:
            files_devel.extend(include_files_devel)
        template_args['files_devel'] = files_devel
    if args.platform_files or platform_include_files:
        template += PLATFORM_FILES_TPL
        platform_files = self.parse_platform_files(
            args.platform_files, platform_include_files)
        template_args['platform_files'] = platform_files
    if args.platform_files_devel or platform_include_files_devel:
        template += PLATFORM_FILES_DEVEL_TPL
        platform_files_devel = self.parse_platform_files(
            args.platform_files_devel, platform_include_files_devel)
        template_args['platform_files_devel'] = platform_files_devel
    try:
        # Bug fix: use a context manager so the file is closed even if
        # the template rendering or write fails.
        with open(filename, 'w') as f:
            f.write(template % template_args)
        m.action(
            _("Package '%s' successfully created in %s") % (name, filename))
    except IOError as ex:
        raise FatalError(_("Error creating package: %s") % ex)