def prepare(self):
    """remove temporary files, create the directory structure"""
    utils.rmtree(self.work_path)
    utils.make_dirs(os.path.join(self.work_iso, 'seedbank/etc/runonce.d'))
    utils.make_dirs(self.work_initrd)
    utils.run('bsdtar -C "%s" -xf "%s"' % (self.work_iso, self.iso_file))
    utils.run('chmod -R u+w "%s"' % self.work_iso)
def remove(self, name, with_delete=True, with_sync=True, with_triggers=True, recursive=False):
    """
    Remove element named 'name' from the collection
    """
    # NOTE: with_delete isn't currently meaningful for repos
    # but is left in for consistency in the API. Unused.
    name = name.lower()
    obj = self.find(name=name)
    if obj is not None:
        if with_delete:
            if with_triggers:
                self._run_triggers(self.config.api, obj,
                                   "/var/lib/cobbler/triggers/delete/repo/pre/*")

        del self.listing[name]
        self.config.serialize_delete(self, obj)

        if with_delete:
            self.log_func("deleted repo %s" % name)
            if with_triggers:
                self._run_triggers(self.config.api, obj,
                                   "/var/lib/cobbler/triggers/delete/repo/post/*")
                self._run_triggers(self.config.api, obj,
                                   "/var/lib/cobbler/triggers/change/*")

        path = "/var/www/cobbler/repo_mirror/%s" % obj.name
        if os.path.exists(path):
            utils.rmtree(path)

        if with_delete and not self.api.is_cobblerd:
            self.api._internal_cache_update("repo", name, remove=True)

        return True
def remove(self, name, with_delete=True, with_sync=True, with_triggers=True, recursive=False, logger=None):
    """
    Remove element named 'name' from the collection
    """
    # NOTE: with_delete isn't currently meaningful for repos
    # but is left in for consistency in the API. Unused.
    name = name.lower()
    obj = self.find(name=name)
    if obj is not None:
        if with_delete:
            if with_triggers:
                utils.run_triggers(self.config.api, obj,
                                   "/var/lib/cobbler/triggers/delete/repo/pre/*", [], logger)

        del self.listing[name]
        self.config.serialize_delete(self, obj)

        if with_delete:
            if with_triggers:
                utils.run_triggers(self.config.api, obj,
                                   "/var/lib/cobbler/triggers/delete/repo/post/*", [], logger)
                utils.run_triggers(self.config.api, obj,
                                   "/var/lib/cobbler/triggers/change/*", [], logger)

        path = "/var/www/cobbler/repo_mirror/%s" % obj.name
        if os.path.exists(path):
            utils.rmtree(path)

        return True

    raise CX(_("cannot delete an object that does not exist: %s") % name)
def remove(self, name, with_delete=True, with_sync=True, with_triggers=True, recursive=False, logger=None):
    """
    Remove element named 'name' from the collection
    """
    name = name.lower()

    # first see if any profiles use this distro
    if not recursive:
        for v in self.config.profiles():
            if v.distro.lower() == name:
                raise CX(_("removal would orphan profile: %s") % v.name)

    obj = self.find(name=name)
    if obj is not None:
        kernel = obj.kernel
        if recursive:
            kids = obj.get_children()
            for k in kids:
                self.config.api.remove_profile(k.name, recursive=recursive,
                                               delete=with_delete,
                                               with_triggers=with_triggers,
                                               logger=logger)

        if with_delete:
            if with_triggers:
                utils.run_triggers(self.config.api, obj,
                                   "/var/lib/cobbler/triggers/delete/distro/pre/*", [], logger)
            if with_sync:
                lite_sync = action_litesync.BootLiteSync(self.config, logger=logger)
                lite_sync.remove_single_distro(name)

        del self.listing[name]
        self.config.serialize_delete(self, obj)

        if with_delete:
            if with_triggers:
                utils.run_triggers(self.config.api, obj,
                                   "/var/lib/cobbler/triggers/delete/distro/post/*", [], logger)
                utils.run_triggers(self.config.api, obj,
                                   "/var/lib/cobbler/triggers/change/*", [], logger)

        # look through all mirrored directories and find if any directory is holding
        # this particular distribution's kernel and initrd
        possible_storage = glob.glob("/var/www/cobbler/ks_mirror/*")
        path = None
        for storage in possible_storage:
            if os.path.dirname(obj.kernel).find(storage) != -1:
                path = storage
                continue

        # if we found a mirrored path above, we can delete the mirrored storage /if/
        # no other object is using the same mirrored storage.
        if with_delete and path is not None and os.path.exists(path) and kernel.find("/var/www/cobbler") != -1:
            # this distro was originally imported so we know we can clean up the associated
            # storage as long as nothing else is also using this storage.
            found = False
            distros = self.api.distros()
            for d in distros:
                if d.kernel.find(path) != -1:
                    found = True
            if not found:
                utils.rmtree(path)

        return True
def remove_single_profile(self, name, rebuild_menu=True):
    # delete profiles/$name file in webdir
    utils.rmfile(os.path.join(self.settings.webdir, "profiles", name))
    # delete contents of kickstarts/$name directory in webdir
    utils.rmtree(os.path.join(self.settings.webdir, "kickstarts", name))
    if rebuild_menu:
        self.sync.pxegen.make_pxe_menu()
def extract(self, ex_path, version):
    if os.path.exists(ex_path):
        utils.rmtree(ex_path, ignore_errors=True)

    path = self.save_file_path(version)

    file = self.extract_class(path, *self.extract_args)
    # currently, python's extracting mechanism for zipfile doesn't
    # copy file permissions, resulting in a binary that doesn't work.
    # Copied from a patch here:
    # http://bugs.python.org/file34873/issue15795_cleaned.patch
    if path.endswith('.zip'):
        members = file.namelist()
        for zipinfo in members:
            minfo = file.getinfo(zipinfo)
            target = file.extract(zipinfo, ex_path)
            mode = minfo.external_attr >> 16 & 0x1FF
            os.chmod(target, mode)
    else:
        file.extractall(ex_path)

    if path.endswith('.tar.gz'):
        dir_name = utils.path_join(ex_path,
                                   os.path.basename(path).replace('.tar.gz', ''))
    else:
        dir_name = utils.path_join(ex_path,
                                   os.path.basename(path).replace('.zip', ''))

    if os.path.exists(dir_name):
        for p in os.listdir(dir_name):
            abs_file = utils.path_join(dir_name, p)
            utils.move(abs_file, ex_path)
        utils.rmtree(dir_name, ignore_errors=True)
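# A minimal standalone sketch of the same workaround described in the comment
# above: zipfile extraction drops the Unix mode bits stored in external_attr,
# so they are re-applied with os.chmod() after each member is written. The
# function name and example paths are illustrative assumptions, not part of
# the project above.
import os
import zipfile

def extract_zip_preserving_modes(zip_path, dest_dir):
    """Extract a zip archive and restore the Unix permission bits of each member."""
    with zipfile.ZipFile(zip_path) as archive:
        for info in archive.infolist():
            target = archive.extract(info, dest_dir)
            mode = (info.external_attr >> 16) & 0o777
            if mode:  # entries created without Unix metadata carry no mode bits
                os.chmod(target, mode)

# Usage (hypothetical paths):
# extract_zip_preserving_modes('node-webkit-linux-x64.zip', '/tmp/nw')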
def clean_trees(self):
    """
    Delete any previously built pxelinux.cfg tree and virt tree info and then create
    directories.

    Note: for SELinux reasons, some information goes in /tftpboot, some in /var/www/cobbler
    and some must be duplicated in both. This is because PXE needs tftp, and auto-kickstart
    and Virt operations need http. Only the kernel and initrd images are duplicated, which
    is unfortunate, though SELinux won't let me give them two contexts, so symlinks are not
    a solution. *Otherwise* duplication is minimal.
    """

    # clean out parts of webdir and all of /tftpboot/images and /tftpboot/pxelinux.cfg
    for x in os.listdir(self.settings.webdir):
        path = os.path.join(self.settings.webdir, x)
        if os.path.isfile(path):
            if not x.endswith(".py"):
                utils.rmfile(path, logger=self.logger)
        if os.path.isdir(path):
            if x not in ["aux", "web", "webui", "localmirror", "repo_mirror", "ks_mirror",
                         "images", "links", "pub", "repo_profile", "repo_system", "svc",
                         "rendered", ".link_cache"]:
                # delete directories that shouldn't exist
                utils.rmtree(path, logger=self.logger)
            if x in ["kickstarts", "kickstarts_sys", "images", "systems", "distros",
                     "profiles", "repo_profile", "repo_system", "rendered"]:
                # clean out directory contents
                utils.rmtree_contents(path, logger=self.logger)

    # self.make_tftpboot()
    utils.rmtree_contents(self.pxelinux_dir, logger=self.logger)
    utils.rmtree_contents(self.grub_dir, logger=self.logger)
    utils.rmtree_contents(self.images_dir, logger=self.logger)
    utils.rmtree_contents(self.yaboot_bin_dir, logger=self.logger)
    utils.rmtree_contents(self.yaboot_cfg_dir, logger=self.logger)
    utils.rmtree_contents(self.rendered_dir, logger=self.logger)
def remove_single_distro(self, name):
    bootloc = utils.tftpboot_location()
    # delete contents of images/$name directory in webdir
    utils.rmtree(os.path.join(self.settings.webdir, "images", name))
    # delete contents of images/$name in tftpboot
    utils.rmtree(os.path.join(bootloc, "images", name))
    # delete potential symlink to tree in webdir/links
    utils.rmfile(os.path.join(self.settings.webdir, "links", name))
def build_extension(self, extension):
    log.info("Building module %s..." % extension)

    # Prepare folders
    os.chdir(self.build_dir)
    module_build_dir = os.path.join(self.build_dir, extension)
    if os.path.exists(module_build_dir):
        log.info("Deleting module build folder %s..." % module_build_dir)
        rmtree(module_build_dir)
    log.info("Creating module build folder %s..." % module_build_dir)
    os.mkdir(module_build_dir)
    os.chdir(module_build_dir)

    module_src_dir = os.path.join(self.sources_dir, extension)

    # Build module
    cmake_cmd = [
        "cmake",
        "-G", self.make_generator,
        "-DQT_QMAKE_EXECUTABLE=%s" % self.qmake_path,
        "-DBUILD_TESTS=False",
        "-DDISABLE_DOCSTRINGS=True",
        "-DCMAKE_BUILD_TYPE=%s" % self.build_type,
        "-DCMAKE_INSTALL_PREFIX=%s" % self.install_dir,
        module_src_dir
    ]
    if sys.version_info[0] > 2:
        cmake_cmd.append("-DPYTHON3_EXECUTABLE=%s" % self.py_executable)
        cmake_cmd.append("-DPYTHON3_INCLUDE_DIR=%s" % self.py_include_dir)
        cmake_cmd.append("-DPYTHON3_LIBRARY=%s" % self.py_library)
        if self.build_type.lower() == 'debug':
            cmake_cmd.append("-DPYTHON3_DBG_EXECUTABLE=%s" % self.py_executable)
            cmake_cmd.append("-DPYTHON3_DEBUG_LIBRARY=%s" % self.py_library)
    else:
        cmake_cmd.append("-DPYTHON_EXECUTABLE=%s" % self.py_executable)
        cmake_cmd.append("-DPYTHON_INCLUDE_DIR=%s" % self.py_include_dir)
        cmake_cmd.append("-DPYTHON_LIBRARY=%s" % self.py_library)
        if self.build_type.lower() == 'debug':
            cmake_cmd.append("-DPYTHON_DEBUG_LIBRARY=%s" % self.py_library)
    if extension.lower() == "shiboken":
        cmake_cmd.append("-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=yes")
        if sys.version_info[0] > 2:
            cmake_cmd.append("-DUSE_PYTHON3=ON")

    log.info("Configuring module %s (%s)..." % (extension, module_src_dir))
    if run_process(cmake_cmd, log) != 0:
        raise DistutilsSetupError("Error configuring " + extension)

    log.info("Compiling module %s..." % extension)
    if run_process([self.make_path], log) != 0:
        raise DistutilsSetupError("Error compiling " + extension)

    log.info("Installing module %s..." % extension)
    if run_process([self.make_path, "install/fast"], log) != 0:
        raise DistutilsSetupError("Error pseudo installing " + extension)

    os.chdir(self.script_dir)
def command_base():
    config.TESTING = True
    dpath = utils.get_data_path('')
    if os.path.exists(dpath):
        utils.rmtree(dpath)
    base = CommandBase()
    base._project_name = 'Test'
    return base
def _extract(self, prefix, files, src, dst, target):
    """extract files to the seedbank temp directory and move those"""
    archive = os.path.join(dst, os.path.basename(src))
    files = (os.path.join(prefix, file_name) for file_name in files)
    temp_manage = os.path.join(self.temp, 'manage')
    if os.path.isdir(temp_manage):
        utils.rmtree(temp_manage)
    utils.make_dirs(temp_manage)
    utils.untar_files(archive, files, temp_manage)
    self.copy_dir_contents(temp_manage, target)
    utils.rmtree(temp_manage)
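# utils.untar_files() above is a seedbank helper; a rough stdlib-only sketch of
# the same step (extract only selected members of a tar archive into a scratch
# directory) could look like this. The function name and arguments are
# illustrative assumptions, not the project's actual API.
import tarfile

def untar_selected(archive, members, dest_dir):
    """Extract only the named members of a tar archive into dest_dir."""
    with tarfile.open(archive) as tar:  # mode 'r' auto-detects gzip/bzip2
        for member in members:
            tar.extract(member, path=dest_dir)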
def _debian_firmware(self, name):
    """integrate Debian non-free firmware"""
    temp_initrd = os.path.join(self.temp, 'initrd')
    initrd = os.path.join(self.cfg['paths']['tftpboot'], 'seedbank', name, 'initrd.gz')
    utils.make_dirs(temp_initrd)
    utils.initrd_extract(temp_initrd, initrd)
    dst = os.path.join(self.temp, 'initrd/lib/firmware')
    self._add_firmware(name, dst)
    utils.initrd_create(temp_initrd, initrd)
    utils.rmtree(temp_initrd)
def remove(self):
    self.clear_cache()
    name = self.name

    for server in self.iter_servers():
        if server.status:
            server.stop()
        server.remove_org(self.id)

    utils.rmtree(self.path)
    LogEntry(message='Deleted organization "%s".' % name)
    Event(type=ORGS_UPDATED)
def _initialize(self):
    self._make_dirs()
    try:
        self.ca_cert = User(self, type=CERT_CA)
        cache_db.set_add("orgs", self.id)
        self.commit()
        LogEntry(message='Created new organization "%s".' % self.name)
    except:
        logger.exception("Failed to create organization. %r" % {"org_id": self.id})
        self.clear_cache()
        utils.rmtree(self.path)
        raise
def _remove_netboot(self, name):
    """remove a netboot image and, if defined, the related firmware files"""
    path = os.path.join(self.cfg['paths']['tftpboot'], 'seedbank', name)
    if not utils.rmtree(path):
        logging.info('release "%s" has not been installed', name)
    else:
        utils.rmtree(os.path.join(self.cfg['paths']['archives'], name))

    release = name.split('-')[1]
    firmware = os.path.join(self.cfg['paths']['archives'], 'firmware-' + release)
    if not utils.rmtree(firmware):
        logging.info('firmware "%s" not found, nothing to do', firmware)
def prepare(self, values):
    """apply templates to all the .sb_template files and build the
    fix_perms.sh script from the permissions file"""
    values.update(self.cfg['pxe'])
    utils.rmtree(self.dst)
    utils.copy_tree(self.path, self.dst)
    for root, _, files in os.walk(self.dst):
        for file_name in files:
            if file_name.endswith('.sb_template'):
                file_name = os.path.join(root, file_name)
                utils.write_template(values, file_name)
                utils.file_move(file_name, os.path.splitext(file_name)[0])
def remove(self):
    logger.debug('Removing server. %r' % {
        'server_id': self.id,
    })

    if self.status:
        self.force_stop()

    utils.rmtree(self.path)

    call_buffer = _call_buffers.pop(self.id, None)
    if call_buffer:
        call_buffer.stop_waiter()
def remove(self):
    logger.debug('Removing server. %r' % {
        'server_id': self.id,
    })
    self.clear_cache()
    name = self.name

    if self.status:
        self.force_stop(True)

    self._remove_primary_user()
    utils.rmtree(self.path)
    LogEntry(message='Deleted server "%s".' % name)
    Event(type=SERVERS_UPDATED)
def _add_firmware(self, name, dst):
    """download, extract and copy Debian non-free firmware"""
    distribution, release, _ = name.split('-')
    path = '-'.join(('firmware', distribution, release))
    archive_dst = os.path.join(self.cfg['paths']['archives'], path)
    temp_firmware = os.path.join(self.temp, 'firmware')
    archive = os.path.join(archive_dst, 'firmware.tar.gz')
    url = self.cfg[distribution]['url_firmware'].replace('${release}', release)
    self._download(url, archive_dst)
    utils.untar(archive, temp_firmware)
    self._extract_debs(temp_firmware)
    src = os.path.join(temp_firmware, 'temp', 'lib/firmware')
    utils.file_move(src, dst)
    utils.rmtree(temp_firmware)
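# self._download() and utils.untar() above are seedbank helpers; a rough
# stdlib-only sketch of the same "fetch a tarball and unpack it" step is shown
# below. Names and paths are illustrative assumptions, not the project's API.
import os
import tarfile
import urllib.request

def download_and_untar(url, archive_path, extract_dir):
    """Download a tar.gz archive to archive_path and unpack it into extract_dir."""
    os.makedirs(os.path.dirname(archive_path), exist_ok=True)
    urllib.request.urlretrieve(url, archive_path)
    # Only call extractall() on archives from a trusted source.
    with tarfile.open(archive_path) as tar:
        tar.extractall(extract_dir)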
def _disable_usb(self, temp_initrd):
    """remove usb storage support from initrd"""
    for root, _, _ in os.walk(temp_initrd):
        if 'kernel/drivers/usb/storage' in root:
            if utils.rmtree(root):
                logging.info('usb storage support has been disabled in the '
                             'initrd image (fixes "root partition not found" error)')
def _upgrade_0_10_5(self):
    logger.debug("Upgrading org to v0.10.5... %r" % {"org_id": self.id})
    for path in (
        os.path.join(self.path, INDEX_NAME),
        os.path.join(self.path, INDEX_NAME + ".old"),
        os.path.join(self.path, INDEX_ATTR_NAME),
        os.path.join(self.path, INDEX_ATTR_NAME + ".old"),
        os.path.join(self.path, SERIAL_NAME),
        os.path.join(self.path, SERIAL_NAME + ".old"),
        os.path.join(self.path, "ca.crl"),
    ):
        try:
            os.remove(path)
        except OSError:
            pass
    utils.rmtree(os.path.join(self.path, "indexed_certs"))
def build_key_archive(self):
    temp_path = app_server.get_temp_path()
    key_archive_path = os.path.join(temp_path, '%s.tar' % self.id)

    try:
        os.makedirs(temp_path)
        tar_file = tarfile.open(key_archive_path, 'w')
        try:
            for server in self.org.iter_servers():
                server_conf_path = os.path.join(self.org.path, TEMP_DIR,
                    '%s_%s.ovpn' % (self.id, server.id))
                server_conf_arcname = '%s_%s_%s.ovpn' % (
                    self.org.name, self.name, server.name)
                server.generate_ca_cert()

                client_conf = OVPN_INLINE_CLIENT_CONF % (
                    self._get_key_info_str(self.name, self.org.name, server.name),
                    server.protocol,
                    server.public_address,
                    server.port,
                )

                if server.otp_auth:
                    client_conf += 'auth-user-pass\n'

                client_conf += '<ca>\n%s\n</ca>\n' % utils.get_cert_block(
                    server.ca_cert_path)
                client_conf += '<cert>\n%s\n</cert>\n' % utils.get_cert_block(
                    self.cert_path)
                client_conf += '<key>\n%s\n</key>\n' % open(
                    self.key_path).read().strip()

                with open(server_conf_path, 'w') as ovpn_conf:
                    os.chmod(server_conf_path, 0600)
                    ovpn_conf.write(client_conf)
                tar_file.add(server_conf_path, arcname=server_conf_arcname)
                os.remove(server_conf_path)
        finally:
            tar_file.close()

        with open(key_archive_path, 'r') as archive_file:
            key_archive = archive_file.read()
    finally:
        utils.rmtree(temp_path)

    return key_archive
def _initialize(self):
    logger.debug('Initialize new server. %r' % {
        'server_id': self.id,
    })
    os.makedirs(os.path.join(self.path, TEMP_DIR))
    try:
        self._generate_dh_param()
        cache_db.set_add('servers', '%s_%s' % (self.id, self.type))
        self.commit()
        LogEntry(message='Created new server "%s".' % self.name)
    except:
        logger.exception('Failed to create server. %r' % {
            'server_id': self.id,
        })
        self.clear_cache()
        utils.rmtree(self.path)
        raise
def _upgrade_0_10_5(self):
    logger.debug('Upgrading org to v0.10.5... %r' % {
        'org_id': self.id,
    })
    for path in (
        os.path.join(self.path, INDEX_NAME),
        os.path.join(self.path, INDEX_NAME + '.old'),
        os.path.join(self.path, INDEX_ATTR_NAME),
        os.path.join(self.path, INDEX_ATTR_NAME + '.old'),
        os.path.join(self.path, SERIAL_NAME),
        os.path.join(self.path, SERIAL_NAME + '.old'),
        os.path.join(self.path, 'ca.crl'),
    ):
        try:
            os.remove(path)
        except OSError:
            pass
    utils.rmtree(os.path.join(self.path, 'indexed_certs'))
def remove(self):
    logger.debug('Removing server. %r' % {
        'server_id': self.id,
    })
    self._clear_list_cache()
    name = self.name
    orgs = list(self.iter_orgs())

    if self.status:
        self.force_stop(True)

    self.clear_cache()
    self._remove_primary_user()
    utils.rmtree(self.path)
    LogEntry(message='Deleted server "%s".' % name)
    Event(type=SERVERS_UPDATED)

    for org in orgs:
        Event(type=USERS_UPDATED, resource_id=org.id)
def generate_dh_param(self):
    logger.debug('Generating server dh params. %r' % {
        'server_id': self.id,
    })
    temp_path = app_server.get_temp_path()
    dh_param_path = os.path.join(temp_path, DH_PARAM_NAME)
    try:
        os.makedirs(temp_path)
        args = [
            'openssl', 'dhparam',
            '-out', dh_param_path,
            str(self.dh_param_bits),
        ]
        subprocess.check_call(args, stdout=subprocess.PIPE,
            stderr=subprocess.PIPE)
        self.read_file('dh_params', dh_param_path)
    finally:
        utils.rmtree(temp_path)
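# The same "work in a throwaway directory, always clean it up" pattern with
# only the standard library, for reference: create a scratch directory, run
# `openssl dhparam` into it, and remove it in a finally block. Function and
# variable names here are illustrative assumptions, not the project's API.
import os
import shutil
import subprocess
import tempfile

def generate_dh_params(bits=2048):
    """Return freshly generated Diffie-Hellman parameters as a string."""
    temp_path = tempfile.mkdtemp()
    dh_param_path = os.path.join(temp_path, 'dh_params.pem')
    try:
        subprocess.check_call(
            ['openssl', 'dhparam', '-out', dh_param_path, str(bits)],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        with open(dh_param_path) as dh_file:
            return dh_file.read()
    finally:
        shutil.rmtree(temp_path, ignore_errors=True)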
def build_extension(self, extension): log.info("Building module %s..." % extension) # Prepare folders os.chdir(self.build_dir) module_build_dir = os.path.join(self.build_dir, extension) skipflag_file = module_build_dir + '-skip' if os.path.exists(skipflag_file): log.info("Skipping %s because %s exists" % (extension, skipflag_file)) return if os.path.exists(module_build_dir): log.info("Deleting module build folder %s..." % module_build_dir) rmtree(module_build_dir) log.info("Creating module build folder %s..." % module_build_dir) os.makedirs(module_build_dir) os.chdir(module_build_dir) module_src_dir = os.path.join(self.sources_dir, extension) # Build module cmake_cmd = [ OPTION_CMAKE, "-G", self.make_generator, "-DQT_QMAKE_EXECUTABLE=%s" % self.qmake_path, "-DBUILD_TESTS=%s" % self.build_tests, "-DDISABLE_DOCSTRINGS=True", "-DCMAKE_BUILD_TYPE=%s" % self.build_type, "-DCMAKE_INSTALL_PREFIX=%s" % self.install_dir, module_src_dir ] if sys.version_info[0] > 2: cmake_cmd.append("-DPYTHON3_EXECUTABLE=%s" % self.py_executable) cmake_cmd.append("-DPYTHON3_INCLUDE_DIR=%s" % self.py_include_dir) cmake_cmd.append("-DPYTHON3_LIBRARY=%s" % self.py_library) if self.build_type.lower() == 'debug': cmake_cmd.append("-DPYTHON3_DBG_EXECUTABLE=%s" % self.py_executable) cmake_cmd.append("-DPYTHON3_DEBUG_LIBRARY=%s" % self.py_library) else: cmake_cmd.append("-DPYTHON_EXECUTABLE=%s" % self.py_executable) cmake_cmd.append("-DPYTHON_INCLUDE_DIR=%s" % self.py_include_dir) cmake_cmd.append("-DPYTHON_LIBRARY=%s" % self.py_library) if self.build_type.lower() == 'debug': cmake_cmd.append("-DPYTHON_DEBUG_LIBRARY=%s" % self.py_library) if sys.platform == 'win32': cmake_cmd.append("-DCMAKE_DEBUG_POSTFIX=_d") if extension.lower() == "shiboken": cmake_cmd.append("-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=yes") if sys.version_info[0] > 2: cmake_cmd.append("-DUSE_PYTHON3=ON") if sys.platform == 'darwin': cmake_cmd.append('-DALTERNATIVE_QT_INCLUDE_DIR=' + self.qtinfo.headers_dir) if OPTION_OSXARCH: # also tell cmake which architecture to use cmake_cmd.append("-DCMAKE_OSX_ARCHITECTURES:STRING={}".format( OPTION_OSXARCH)) log.info("Configuring module %s (%s)..." % (extension, module_src_dir)) if run_process(cmake_cmd) != 0: raise DistutilsSetupError("Error configuring " + extension) log.info("Compiling module %s..." % extension) cmd_make = [self.make_path] if OPTION_JOBS: cmd_make.append(OPTION_JOBS) if run_process(cmd_make) != 0: raise DistutilsSetupError("Error compiling " + extension) if extension.lower() == "shiboken": log.info("Generating Shiboken documentation %s..." % extension) if run_process([self.make_path, "doc"]) != 0: raise DistutilsSetupError("Error generating documentation " + extension) log.info("Installing module %s..." % extension) if run_process([self.make_path, "install/fast"]) != 0: raise DistutilsSetupError("Error pseudo installing " + extension) os.chdir(self.script_dir)
def make_output_dirs(self): self.output_err = '' try: self.progress_text = 'Removing old output directory...\n' output_dir = utils.path_join(self.output_dir(), self.project_name()) if os.path.exists(output_dir): utils.rmtree(output_dir, ignore_errors=True) temp_dir = utils.path_join(TEMP_DIR, 'webexectemp') if os.path.exists(temp_dir): utils.rmtree(temp_dir, ignore_errors=True) self.progress_text = 'Making new directories...\n' if not os.path.exists(output_dir): os.makedirs(output_dir) os.makedirs(temp_dir) self.copy_files_to_project_folder() json_file = utils.path_join(self.project_dir(), 'package.json') global_json = utils.get_data_file_path('files/global.json') if self.output_package_json: with codecs.open(json_file, 'w+', encoding='utf-8') as f: f.write(self.generate_json()) with codecs.open(global_json, 'w+', encoding='utf-8') as f: f.write(self.generate_json(global_json=True)) zip_file = utils.path_join(temp_dir, self.project_name()+'.nw') app_nw_folder = utils.path_join(temp_dir, self.project_name()+'.nwf') utils.copytree(self.project_dir(), app_nw_folder, ignore=shutil.ignore_patterns(output_dir)) zip_files(zip_file, self.project_dir(), exclude_paths=[output_dir]) for ex_setting in self.settings['export_settings'].values(): if ex_setting.value: self.progress_text = '\n' name = ex_setting.display_name self.progress_text = u'Making files for {}...'.format(name) export_dest = utils.path_join(output_dir, ex_setting.name) versions = re.findall('(\d+)\.(\d+)\.(\d+)', self.selected_version())[0] minor = int(versions[1]) if minor >= 12: export_dest = export_dest.replace('node-webkit', 'nwjs') if os.path.exists(export_dest): utils.rmtree(export_dest, ignore_errors=True) # shutil will make the directory for us utils.copytree(get_data_path('files/'+ex_setting.name), export_dest, ignore=shutil.ignore_patterns('place_holder.txt')) utils.rmtree(get_data_path('files/'+ex_setting.name), ignore_errors=True) self.progress_text += '.' if 'mac' in ex_setting.name: uncomp_setting = self.get_setting('uncompressed_folder') uncompressed = uncomp_setting.value app_path = utils.path_join(export_dest, self.project_name()+'.app') try: utils.move(utils.path_join(export_dest, 'nwjs.app'), app_path) except IOError: utils.move(utils.path_join(export_dest, 'node-webkit.app'), app_path) plist_path = utils.path_join(app_path, 'Contents', 'Info.plist') plist_dict = plistlib.readPlist(plist_path) plist_dict['CFBundleDisplayName'] = self.project_name() plist_dict['CFBundleName'] = self.project_name() version_setting = self.get_setting('version') plist_dict['CFBundleShortVersionString'] = version_setting.value plist_dict['CFBundleVersion'] = version_setting.value plistlib.writePlist(plist_dict, plist_path) self.progress_text += '.' app_nw_res = utils.path_join(app_path, 'Contents', 'Resources', 'app.nw') if uncompressed: utils.copytree(app_nw_folder, app_nw_res) else: utils.copy(zip_file, app_nw_res) self.create_icns_for_app(utils.path_join(app_path, 'Contents', 'Resources', 'nw.icns')) self.progress_text += '.' 
else: ext = '' windows = False if 'windows' in ex_setting.name: ext = '.exe' windows = True nw_path = utils.path_join(export_dest, ex_setting.dest_files[0]) if windows: self.replace_icon_in_exe(nw_path) self.compress_nw(nw_path) dest_binary_path = utils.path_join(export_dest, self.project_name() + ext) if 'linux' in ex_setting.name: self.make_desktop_file(dest_binary_path, export_dest) join_files(dest_binary_path, nw_path, zip_file) sevenfivefive = (stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH) os.chmod(dest_binary_path, sevenfivefive) self.progress_text += '.' if os.path.exists(nw_path): os.remove(nw_path) except Exception: error = u''.join([unicode(x) for x in traceback.format_exception(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2])]) self.logger.error(error) self.output_err += error finally: utils.rmtree(temp_dir, ignore_errors=True)
def prepare_packages_posix(self, vars): executables = [] if sys.platform.startswith('linux'): so_ext = '.so' so_star = so_ext + '.*' elif sys.platform == 'darwin': so_ext = '.dylib' so_star = so_ext # <build>/shiboken/doc/html/* -> <setup>/PySide/docs/shiboken copydir("{build_dir}/shiboken/doc/html", "{dist_dir}/PySide/docs/shiboken", force=False, vars=vars) # <install>/lib/site-packages/PySide/* -> <setup>/PySide copydir("{site_packages_dir}/PySide", "{dist_dir}/PySide", vars=vars) # <install>/lib/site-packages/shiboken.so -> <setup>/PySide/shiboken.so copyfile("{site_packages_dir}/shiboken.so", "{dist_dir}/PySide/shiboken.so", vars=vars) # <install>/lib/site-packages/pysideuic/* -> <setup>/pysideuic copydir("{site_packages_dir}/pysideuic", "{dist_dir}/pysideuic", force=False, vars=vars) if sys.version_info[0] > 2: rmtree("{dist_dir}/pysideuic/port_v2".format(**vars)) else: rmtree("{dist_dir}/pysideuic/port_v3".format(**vars)) # <install>/bin/pyside-uic -> PySide/scripts/uic.py makefile("{dist_dir}/PySide/scripts/__init__.py", vars=vars) copyfile("{install_dir}/bin/pyside-uic", "{dist_dir}/PySide/scripts/uic.py", force=False, vars=vars) # <install>/bin/* -> PySide/ executables.extend( copydir("{install_dir}/bin/", "{dist_dir}/PySide", filter=[ "pyside-lupdate", "pyside-rcc", "shiboken", ], recursive=False, vars=vars)) # <install>/lib/lib* -> PySide/ copydir("{install_dir}/lib/", "{dist_dir}/PySide", filter=[ "libpyside*" + so_star, "libshiboken*" + so_star, ], recursive=False, vars=vars) # <install>/share/PySide/typesystems/* -> <setup>/PySide/typesystems copydir("{install_dir}/share/PySide/typesystems", "{dist_dir}/PySide/typesystems", vars=vars) # <install>/include/* -> <setup>/PySide/include copydir("{install_dir}/include", "{dist_dir}/PySide/include", vars=vars) if not OPTION_NOEXAMPLES: # <sources>/pyside-examples/examples/* -> <setup>/PySide/examples copydir("{sources_dir}/pyside-examples/examples", "{dist_dir}/PySide/examples", force=False, vars=vars) # Re-generate examples Qt resource files for Python 3 compatibility if sys.version_info[0] == 3: examples_path = "{dist_dir}/PySide/examples".format(**vars) pyside_rcc_path = "{install_dir}/bin/pyside-rcc".format(**vars) pyside_rcc_options = '-py3' regenerate_qt_resources(examples_path, pyside_rcc_path, pyside_rcc_options) # Copy Qt libs to package if OPTION_STANDALONE: if sys.platform == 'darwin': raise RuntimeError('--standalone not yet supported for OSX') # <qt>/bin/* -> <setup>/PySide executables.extend( copydir("{qt_bin_dir}", "{dist_dir}/PySide", filter=[ "designer", "linguist", "lrelease", "lupdate", "lconvert", ], recursive=False, vars=vars)) # <qt>/lib/* -> <setup>/PySide copydir("{qt_lib_dir}", "{dist_dir}/PySide", filter=[ "libQt*.so.?", "libphonon.so.?", ], recursive=False, vars=vars) # <qt>/plugins/* -> <setup>/PySide/plugins copydir("{qt_plugins_dir}", "{dist_dir}/PySide/plugins", filter=["*.so"], vars=vars) # <qt>/imports/* -> <setup>/PySide/imports if float(vars["qt_version"][:3]) > 4.6: copydir("{qt_imports_dir}", "{dist_dir}/PySide/imports", filter=["qmldir", "*.so"], force=False, vars=vars) # <qt>/translations/* -> <setup>/PySide/translations copydir("{qt_translations_dir}", "{dist_dir}/PySide/translations", filter=["*.qm"], vars=vars) # Update rpath to $ORIGIN if sys.platform.startswith('linux') or sys.platform.startswith( 'darwin'): self.update_rpath("{dist_dir}/PySide".format(**vars), executables)
def build_extension(self, extension): log.info("Building module %s..." % extension) # Prepare folders os.chdir(self.build_dir) module_build_dir = os.path.join(self.build_dir, extension) if os.path.exists(module_build_dir): log.info("Deleting module build folder %s..." % module_build_dir) rmtree(module_build_dir) log.info("Creating module build folder %s..." % module_build_dir) os.mkdir(module_build_dir) os.chdir(module_build_dir) module_src_dir = os.path.join(self.sources_dir, extension) # Build module cmake_cmd = [ OPTION_CMAKE, "-G", self.make_generator, "-DQT_QMAKE_EXECUTABLE=%s" % self.qmake_path, "-DBUILD_TESTS=False", "-DDISABLE_DOCSTRINGS=True", "-DCMAKE_BUILD_TYPE=%s" % self.build_type, "-DCMAKE_INSTALL_PREFIX=%s" % self.install_dir, module_src_dir ] if sys.version_info[0] > 2: cmake_cmd.append("-DPYTHON3_EXECUTABLE=%s" % self.py_executable) cmake_cmd.append("-DPYTHON3_INCLUDE_DIR=%s" % self.py_include_dir) cmake_cmd.append("-DPYTHON3_LIBRARY=%s" % self.py_library) if self.build_type.lower() == 'debug': cmake_cmd.append("-DPYTHON3_DBG_EXECUTABLE=%s" % self.py_executable) cmake_cmd.append("-DPYTHON3_DEBUG_LIBRARY=%s" % self.py_library) else: cmake_cmd.append("-DPYTHON_EXECUTABLE=%s" % self.py_executable) cmake_cmd.append("-DPYTHON_INCLUDE_DIR=%s" % self.py_include_dir) cmake_cmd.append("-DPYTHON_LIBRARY=%s" % self.py_library) if self.build_type.lower() == 'debug': cmake_cmd.append("-DPYTHON_DEBUG_LIBRARY=%s" % self.py_library) if sys.platform == "win32" and self.build_type.lower() == 'debug': cmake_cmd.append("-DCMAKE_DEBUG_POSTFIX=_d") if extension.lower() == "shiboken": cmake_cmd.append("-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=yes") if sys.version_info[0] > 2: cmake_cmd.append("-DUSE_PYTHON3=ON") elif sys.platform == 'darwin': # Work round cmake include problem # http://neilweisenfeld.com/wp/120/building-pyside-on-the-mac # https://groups.google.com/forum/#!msg/pyside/xciZZ4Hm2j8/CUmqfJptOwoJ cmake_cmd.append('-DALTERNATIVE_QT_INCLUDE_DIR=/Library/Frameworks') log.info("Configuring module %s (%s)..." % (extension, module_src_dir)) if run_process(cmake_cmd, log) != 0: raise DistutilsSetupError("Error configuring " + extension) log.info("Compiling module %s..." % extension) if run_process([self.make_path], log) != 0: raise DistutilsSetupError("Error compiling " + extension) if extension.lower() == "shiboken": log.info("Generating Shiboken documentation %s..." % extension) if run_process([self.make_path, "doc"], log) != 0: raise DistutilsSetupError("Error generating documentation " + extension) log.info("Installing module %s..." % extension) if run_process([self.make_path, "install/fast"], log) != 0: raise DistutilsSetupError("Error pseudo installing " + extension) os.chdir(self.script_dir)
    module_name = m[0]
    module_version = m[1]
    print("Checking out submodule %s to branch %s" % (module_name, module_version))
    module_dir = os.path.join(submodules_dir, module_name)
    os.chdir(module_dir)
    git_checkout_cmd = ["git", "checkout", module_version]
    if run_process(git_checkout_cmd) != 0:
        raise DistutilsSetupError("Failed to initialize the git submodule %s" % module_name)
os.chdir(script_dir)

# Clean up temp and package folders
for n in ["pyside_package", "build", "PySide-%s" % __version__]:
    d = os.path.join(script_dir, n)
    if os.path.isdir(d):
        print("Removing %s" % d)
        rmtree(d)

# Prepare package folders
for pkg in ["pyside_package/PySide", "pyside_package/pysideuic"]:
    pkg_dir = os.path.join(script_dir, pkg)
    os.makedirs(pkg_dir)


class pyside_install(_install):
    def run(self):
        _install.run(self)

        # Custom script we run at the end of installing - this is the same script
        # run by bdist_wininst
        # If self.root has a value, it means we are being "installed" into
        # some other directory than Python itself (eg, into a temp directory
        # for bdist_wininst to use) - in which case we must *not* run our
        # installer
def prepare_packages_win32(self, vars): pdbs = ['*.pdb'] if self.debug or self.build_type == 'RelWithDebInfo' else [] # <install>/lib/site-packages/PySide2/* -> <setup>/PySide2 copydir( "{site_packages_dir}/PySide2", "{dist_dir}/PySide2", vars=vars) if self.debug or self.build_type == 'RelWithDebInfo': # <build>/pyside2/PySide2/*.pdb -> <setup>/PySide2 copydir( "{build_dir}/pyside2/PySide2", "{dist_dir}/PySide2", filter=pdbs, recursive=False, vars=vars) # <build>/shiboken2/doc/html/* -> <setup>/PySide2/docs/shiboken2 copydir( "{build_dir}/shiboken2/doc/html", "{dist_dir}/PySide2/docs/shiboken2", force=False, vars=vars) # <install>/lib/site-packages/shiboken2.pyd -> <setup>/PySide2/shiboken2.pyd copyfile( "{site_packages_dir}/shiboken2{dbgPostfix}.pyd", "{dist_dir}/PySide2/shiboken2{dbgPostfix}.pyd", vars=vars) if self.debug or self.build_type == 'RelWithDebInfo': copyfile( "{build_dir}/shiboken2/shibokenmodule/shiboken2{dbgPostfix}.pdb", "{dist_dir}/PySide2/shiboken2{dbgPostfix}.pdb", vars=vars) # <install>/lib/site-packages/pysideuic/* -> <setup>/pysideuic copydir( "{site_packages_dir}/pysideuic", "{dist_dir}/pysideuic", force=False, vars=vars) if sys.version_info[0] > 2: rmtree("{dist_dir}/pysideuic/port_v2".format(**vars)) else: rmtree("{dist_dir}/pysideuic/port_v3".format(**vars)) # <install>/bin/pyside2-uic -> PySide2/scripts/uic.py makefile( "{dist_dir}/PySide2/scripts/__init__.py", vars=vars) copyfile( "{install_dir}/bin/pyside2-uic", "{dist_dir}/PySide2/scripts/uic.py", force=False, vars=vars) # <install>/bin/*.exe,*.dll,*.pdb -> PySide2/ copydir( "{install_dir}/bin/", "{dist_dir}/PySide2", filter=["*.exe", "*.dll"] + pdbs, recursive=False, vars=vars) # <install>/lib/*.lib -> PySide2/ copydir( "{install_dir}/lib/", "{dist_dir}/PySide2", filter=["*.lib"], recursive=False, vars=vars) # <install>/share/PySide2/typesystems/* -> <setup>/PySide2/typesystems copydir( "{install_dir}/share/PySide2/typesystems", "{dist_dir}/PySide2/typesystems", vars=vars) # <install>/include/* -> <setup>/PySide2/include copydir( "{install_dir}/include", "{dist_dir}/PySide2/include", vars=vars) if not OPTION_NOEXAMPLES: # <sources>/pyside2-examples/examples/* -> <setup>/PySide2/examples folder = get_extension_folder('pyside2-examples') copydir( "{sources_dir}/%s/examples" % folder, "{dist_dir}/PySide2/examples", force=False, vars=vars) # Re-generate examples Qt resource files for Python 3 compatibility if sys.version_info[0] == 3: examples_path = "{dist_dir}/PySide2/examples".format(**vars) pyside_rcc_path = "{install_dir}/bin/pyside2-rcc".format(**vars) pyside_rcc_options = '-py3' regenerate_qt_resources(examples_path, pyside_rcc_path, pyside_rcc_options) # <ssl_libs>/* -> <setup>/PySide2/openssl copydir("{ssl_libs_dir}", "{dist_dir}/PySide2/openssl", filter=[ "libeay32.dll", "ssleay32.dll"], force=False, vars=vars) # <qt>/bin/*.dll -> <setup>/PySide2 copydir("{qt_bin_dir}", "{dist_dir}/PySide2", filter=[ "*.dll", "designer.exe", "linguist.exe", "lrelease.exe", "lupdate.exe", "lconvert.exe"], ignore=["*d4.dll"], recursive=False, vars=vars) if self.debug: # <qt>/bin/*d4.dll -> <setup>/PySide2 copydir("{qt_bin_dir}", "{dist_dir}/PySide2", filter=["*d4.dll"] + pdbs, recursive=False, vars=vars) if self.debug or self.build_type == 'RelWithDebInfo': # <qt>/lib/*.pdb -> <setup>/PySide2 copydir("{qt_lib_dir}", "{dist_dir}/PySide2", filter=["*.pdb"], recursive=False, vars=vars) # I think these are the qt-mobility DLLs, at least some are, # so let's copy them too # <qt>/lib/*.dll -> <setup>/PySide2 
copydir("{qt_lib_dir}", "{dist_dir}/PySide2", filter=["*.dll"], ignore=["*d?.dll"], recursive=False, vars=vars) if self.debug: # <qt>/lib/*d4.dll -> <setup>/PySide2 copydir("{qt_lib_dir}", "{dist_dir}/PySide2", filter=["*d?.dll"], recursive=False, vars=vars) if self.debug or self.build_type == 'RelWithDebInfo': # <qt>/lib/*pdb -> <setup>/PySide2 copydir("{qt_lib_dir}", "{dist_dir}/PySide2", filter=pdbs, recursive=False, vars=vars) # <qt>/plugins/* -> <setup>/PySide2/plugins copydir("{qt_plugins_dir}", "{dist_dir}/PySide2/plugins", filter=["*.dll"] + pdbs, vars=vars) # <qt>/imports/* -> <setup>/PySide2/imports copydir("{qt_imports_dir}", "{dist_dir}/PySide2/imports", filter=["qmldir", "*.dll"] + pdbs, vars=vars) # <qt>/translations/* -> <setup>/PySide2/translations copydir("{qt_translations_dir}", "{dist_dir}/PySide2/translations", filter=["*.qm"], vars=vars) # pdb files for libshiboken and libpyside if self.debug or self.build_type == 'RelWithDebInfo': # XXX dbgPostfix gives problems - the structure in shiboken2/data should be re-written! copyfile( "{build_dir}/shiboken2/libshiboken/shiboken2-python{py_version}.pdb", "{dist_dir}/PySide2/shiboken2-python{py_version}.pdb", # omitted dbgPostfix vars=vars) copyfile( "{build_dir}/pyside2/libpyside/pyside2-python{py_version}.pdb", "{dist_dir}/PySide2/pyside2-python{py_version}.pdb", # omitted dbgPostfix vars=vars)
def prepare_packages_win32(self, vars): pdbs = ['*.pdb' ] if self.debug or self.build_type == 'RelWithDebInfo' else [] # <install>/lib/site-packages/PySide/* -> <setup>/PySide copydir("{site_packages_dir}/PySide", "{dist_dir}/PySide", vars=vars) if self.debug or self.build_type == 'RelWithDebInfo': # <build>/pyside/PySide/*.pdb -> <setup>/PySide copydir("{build_dir}/pyside/PySide", "{dist_dir}/PySide", filter=pdbs, recursive=False, vars=vars) # <build>/shiboken/doc/html/* -> <setup>/PySide/docs/shiboken copydir("{build_dir}/shiboken/doc/html", "{dist_dir}/PySide/docs/shiboken", force=False, vars=vars) # <install>/lib/site-packages/shiboken.pyd -> <setup>/PySide/shiboken.pyd copyfile("{site_packages_dir}/shiboken{dbgPostfix}.pyd", "{dist_dir}/PySide/shiboken{dbgPostfix}.pyd", vars=vars) if self.debug or self.build_type == 'RelWithDebInfo': copyfile( "{build_dir}/shiboken/shibokenmodule/shiboken{dbgPostfix}.pdb", "{dist_dir}/PySide/shiboken{dbgPostfix}.pdb", vars=vars) # <install>/lib/site-packages/pysideuic/* -> <setup>/pysideuic copydir("{site_packages_dir}/pysideuic", "{dist_dir}/pysideuic", force=False, vars=vars) if sys.version_info[0] > 2: rmtree("{dist_dir}/pysideuic/port_v2".format(**vars)) else: rmtree("{dist_dir}/pysideuic/port_v3".format(**vars)) # <install>/bin/pyside-uic -> PySide/scripts/uic.py makefile("{dist_dir}/PySide/scripts/__init__.py", vars=vars) copyfile("{install_dir}/bin/pyside-uic", "{dist_dir}/PySide/scripts/uic.py", force=False, vars=vars) # <install>/bin/*.exe,*.dll,*.pdb -> PySide/ copydir("{install_dir}/bin/", "{dist_dir}/PySide", filter=["*.exe", "*.dll"] + pdbs, recursive=False, vars=vars) # <install>/lib/*.lib -> PySide/ copydir("{install_dir}/lib/", "{dist_dir}/PySide", filter=["*.lib"], recursive=False, vars=vars) # <install>/share/PySide/typesystems/* -> <setup>/PySide/typesystems copydir("{install_dir}/share/PySide/typesystems", "{dist_dir}/PySide/typesystems", vars=vars) # <install>/include/* -> <setup>/PySide/include copydir("{install_dir}/include", "{dist_dir}/PySide/include", vars=vars) if not OPTION_NOEXAMPLES: # <sources>/pyside-examples/examples/* -> <setup>/PySide/examples copydir("{sources_dir}/pyside-examples/examples", "{dist_dir}/PySide/examples", force=False, vars=vars) # Re-generate examples Qt resource files for Python 3 compatibility if sys.version_info[0] == 3: examples_path = "{dist_dir}/PySide/examples".format(**vars) pyside_rcc_path = "{install_dir}/bin/pyside-rcc".format(**vars) pyside_rcc_options = '-py3' regenerate_qt_resources(examples_path, pyside_rcc_path, pyside_rcc_options) # <ssl_libs>/* -> <setup>/PySide/openssl copydir("{ssl_libs_dir}", "{dist_dir}/PySide/openssl", filter=["libeay32.dll", "ssleay32.dll"], force=False, vars=vars) # <qt>/bin/*.dll -> <setup>/PySide copydir("{qt_bin_dir}", "{dist_dir}/PySide", filter=[ "*.dll", "designer.exe", "linguist.exe", "lrelease.exe", "lupdate.exe", "lconvert.exe" ], ignore=["*d4.dll"], recursive=False, vars=vars) if self.debug: # <qt>/bin/*d4.dll -> <setup>/PySide copydir("{qt_bin_dir}", "{dist_dir}/PySide", filter=["*d4.dll"] + pdbs, recursive=False, vars=vars) if self.debug or self.build_type == 'RelWithDebInfo': # <qt>/lib/*.pdb -> <setup>/PySide copydir("{qt_lib_dir}", "{dist_dir}/PySide", filter=["*.pdb"], recursive=False, vars=vars) # I think these are the qt-mobility DLLs, at least some are, # so let's copy them too # <qt>/lib/*.dll -> <setup>/PySide copydir("{qt_lib_dir}", "{dist_dir}/PySide", filter=["*.dll"], ignore=["*d?.dll"], recursive=False, vars=vars) if self.debug: # 
<qt>/lib/*d4.dll -> <setup>/PySide copydir("{qt_lib_dir}", "{dist_dir}/PySide", filter=["*d?.dll"], recursive=False, vars=vars) if self.debug or self.build_type == 'RelWithDebInfo': # <qt>/lib/*pdb -> <setup>/PySide copydir("{qt_lib_dir}", "{dist_dir}/PySide", filter=pdbs, recursive=False, vars=vars) # <qt>/plugins/* -> <setup>/PySide/plugins copydir("{qt_plugins_dir}", "{dist_dir}/PySide/plugins", filter=["*.dll"] + pdbs, vars=vars) # <qt>/imports/* -> <setup>/PySide/imports copydir("{qt_imports_dir}", "{dist_dir}/PySide/imports", filter=["qmldir", "*.dll"] + pdbs, vars=vars) # <qt>/translations/* -> <setup>/PySide/translations copydir("{qt_translations_dir}", "{dist_dir}/PySide/translations", filter=["*.qm"], vars=vars) # pdb files for libshiboken and libpyside if self.debug or self.build_type == 'RelWithDebInfo': copyfile( "{build_dir}/shiboken/libshiboken/shiboken-python{py_version}{dbgPostfix}.pdb", "{dist_dir}/PySide/shiboken-python{py_version}{dbgPostfix}.pdb", vars=vars) copyfile( "{build_dir}/pyside/libpyside/pyside-python{py_version}{dbgPostfix}.pdb", "{dist_dir}/PySide/pyside-python{py_version}{dbgPostfix}.pdb", vars=vars)
def remove(self, name, with_delete=True, with_sync=True, with_triggers=True,
           recursive=False, logger=None):
    """
    Remove element named 'name' from the collection
    """
    name = name.lower()

    # first see if any profiles use this distro
    if not recursive:
        for v in self.config.profiles():
            if v.distro and v.distro.lower() == name:
                raise CX(_("removal would orphan profile: %s") % v.name)

    obj = self.find(name=name)
    if obj is not None:
        kernel = obj.kernel
        if recursive:
            kids = obj.get_children()
            for k in kids:
                self.config.api.remove_profile(k.name, recursive=recursive,
                                               delete=with_delete,
                                               with_triggers=with_triggers,
                                               logger=logger)

        if with_delete:
            if with_triggers:
                utils.run_triggers(
                    self.config.api, obj,
                    "/var/lib/cobbler/triggers/delete/distro/pre/*", [], logger)
            if with_sync:
                lite_sync = action_litesync.BootLiteSync(self.config, logger=logger)
                lite_sync.remove_single_distro(name)

        self.lock.acquire()
        try:
            del self.listing[name]
        finally:
            self.lock.release()
        self.config.serialize_delete(self, obj)

        if with_delete:
            if with_triggers:
                utils.run_triggers(
                    self.config.api, obj,
                    "/var/lib/cobbler/triggers/delete/distro/post/*", [], logger)
                utils.run_triggers(self.config.api, obj,
                                   "/var/lib/cobbler/triggers/change/*", [], logger)

        # look through all mirrored directories and find if any directory is holding
        # this particular distribution's kernel and initrd
        settings = self.config.settings()
        possible_storage = glob.glob(settings.webdir + "/ks_mirror/*")
        path = None
        for storage in possible_storage:
            if os.path.dirname(obj.kernel).find(storage) != -1:
                path = storage
                continue

        # if we found a mirrored path above, we can delete the mirrored storage /if/
        # no other object is using the same mirrored storage.
        if with_delete and path is not None and os.path.exists(path) \
                and kernel.find(settings.webdir) != -1:
            # this distro was originally imported so we know we can clean up the associated
            # storage as long as nothing else is also using this storage.
            found = False
            distros = self.api.distros()
            for d in distros:
                if d.kernel.find(path) != -1:
                    found = True
            if not found:
                utils.rmtree(path)

        return True
def _clean_if_requested(self):
    if not self.build_cfg.do_clean() or not os.path.isdir(self._wrk_dir()):
        return
    log.info("purging build directory")
    utils.rmtree(self._wrk_dir())
    os.mkdir(self._wrk_dir())
def _expandChildren(path, parent, blueprintRoot):
    if path:
        path = utils.normalisePath(path)
        name = path.rsplit("/", 1)[-1]
        if name in parent.children:
            package = parent.children[name]
        else:
            package = CraftPackageObject(name, parent)
            package.filePath = path
    elif blueprintRoot:
        path = blueprintRoot
        package = parent
    else:
        raise Exception("Unreachable")

    if not package.categoryInfo:
        package.categoryInfo = CategoryPackageObject(path)
        if not package.categoryInfo.valid and package.parent:
            # we actually need a copy
            package.categoryInfo = copy.copy(package.parent.categoryInfo)
        if not package.categoryInfo.valid:
            package.categoryInfo = CategoryPackageObject(blueprintRoot)

    for f in os.listdir(path):
        fPath = os.path.abspath(os.path.join(path, f))
        if os.path.isdir(fPath):
            if not CraftPackageObject._isDirIgnored(f):
                child = CraftPackageObject._expandChildren(fPath, package, blueprintRoot)
                if child:
                    if f in package.children:
                        existingNode = package.children[f]
                        if not existingNode.isCategory():
                            CraftCore.log.warning(
                                f"Blueprint clash detected: Ignoring {child.source} in favour of {existingNode.source}")
                            continue
                        else:
                            # merge with existing node
                            existingNode.children.update(child.children)
                    else:
                        package.children[f] = child
        elif f.endswith(".py"):
            if package.source:
                raise BlueprintException(
                    f"Multiple py files in one directory: {package.source} and {f}", package)
            if f[:-3] != package.name:
                raise BlueprintException(
                    f"Recipes must match the name of the directory: {fPath}", package)
            package.source = fPath
            CraftPackageObject._allLeaves[package.path] = package

    if package.children and package.source:
        raise BlueprintException(
            f"{package} has children but also a recipe {package.source}!", package)

    if path != blueprintRoot:
        if not package.source and not package.children:
            if os.listdir(path) in [["__pycache__"], []]:
                # the recipe was removed
                utils.rmtree(path)
            else:
                CraftCore.log.warning(
                    f"Found a dead branch in {blueprintRoot}/{package.path}\n"
                    f"You might want to run \"git clean -xdf\" in that directory.")
            return None
    return package
    print("Checking out submodule %s to branch %s" % (module_name, module_version))
    module_dir = os.path.join(submodules_dir, module_name)
    os.chdir(module_dir)
    git_checkout_cmd = ["git", "checkout", module_version]
    if run_process(git_checkout_cmd) != 0:
        raise DistutilsSetupError(
            "Failed to initialize the git submodule %s" % module_name)
os.chdir(script_dir)

# Clean up temp and package folders
for n in ["pyside_package", "build", "PySide-%s" % __version__]:
    d = os.path.join(script_dir, n)
    if os.path.isdir(d):
        print("Removing %s" % d)
        rmtree(d)

# Prepare package folders
for pkg in ["pyside_package/PySide", "pyside_package/pysideuic"]:
    pkg_dir = os.path.join(script_dir, pkg)
    os.makedirs(pkg_dir)


# TODO:
# This class can be removed after OSX support
# is implemented in pyside_build.update_rpath()
class pyside_install(_install):
    def run(self):
        _install.run(self)

        # Custom script we run at the end of installing
        # If self.root has a value, it means we are being "installed" into
def unpack(self):
    if not AutoToolsPackageBase.unpack(self):
        return False
    for d in ["freetype", "jpeg", "libpng", "lcms", "lcms2", "tiff"]:  # , "zlib", "openjpeg"
        utils.rmtree(os.path.join(self.sourceDir(), d))
    return True