def isRelevant(self, vardb, config, profile):
    """Return True if this news item applies to the current system.

    The caller passes in the objects a restriction may need to do its
    lookup (an installed-package vardb, the config, and the profile);
    each restriction plucks out what it requires and may raise
    ValueError if a required object is missing.  Restrictions of the
    form Display-X are OR'd within their group, and the groups are
    AND'd together: every group must yield at least one match.
    """
    if not self._parsed:
        self.parse()
    # An item with no restrictions is relevant everywhere.
    if not len(self.restrictions):
        return True
    lookup_args = {
        'vardb': vardb,
        'config': config,
        'profile': profile,
    }
    relevant = True
    for group in self.restrictions.values():
        # Evaluate every restriction in the group (deliberately no
        # short-circuit, matching the original call pattern), then OR
        # the results together.
        outcomes = [restriction.checkRestriction(
                **portage._native_kwargs(lookup_args))
            for restriction in group]
        if True not in outcomes:
            relevant = False
    return relevant
def update(self):
    """Run ``git pull`` in the existing repository.

    The configured sync URI is deliberately ignored: whatever branch
    the user has checked out is the branch that gets updated, and
    branch management is left to git itself.

    Returns a ``(exit_status, changed)`` tuple where ``changed`` tells
    whether HEAD actually moved (i.e. whether caches need a refresh).
    """
    opts = ""
    if self.settings.get("PORTAGE_QUIET") == "1":
        opts += " --quiet"
    git_cmd = "%s pull%s" % (self.bin_command, opts)
    writemsg_level(git_cmd + "\n")

    # Capture HEAD before and after the pull so the caller can tell
    # whether anything changed.
    rev_cmd = [self.bin_command, "rev-list", "--max-count=1", "HEAD"]
    previous_rev = subprocess.check_output(rev_cmd,
        cwd=portage._unicode_encode(self.repo.location))

    exitcode = portage.process.spawn_bash(
        "cd %s ; exec %s" % (
            portage._shell_quote(self.repo.location), git_cmd),
        **portage._native_kwargs(self.spawn_kwargs))
    if exitcode != os.EX_OK:
        msg = "!!! git pull error in %s" % self.repo.location
        self.logger(self.xterm_titles, msg)
        writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
        return (exitcode, False)

    current_rev = subprocess.check_output(rev_cmd,
        cwd=portage._unicode_encode(self.repo.location))
    return (os.EX_OK, current_rev != previous_rev)
def update(self):
    """Pull updates into the existing git checkout.

    The sync URI is ignored; the user's currently checked-out branch
    is updated in place.  Returns ``(exit_status, head_moved)``.
    """
    quiet = self.settings.get("PORTAGE_QUIET") == "1"
    git_cmd = "%s pull%s" % (self.bin_command, " --quiet" if quiet else "")
    writemsg_level(git_cmd + "\n")

    location = self.repo.location
    rev_cmd = [self.bin_command, "rev-list", "--max-count=1", "HEAD"]

    def head_rev():
        # Snapshot the current HEAD commit so movement can be detected.
        return subprocess.check_output(rev_cmd,
            cwd=portage._unicode_encode(location))

    previous_rev = head_rev()

    exitcode = portage.process.spawn_bash(
        "cd %s ; exec %s" % (portage._shell_quote(location), git_cmd),
        **portage._native_kwargs(self.spawn_kwargs))
    if exitcode != os.EX_OK:
        msg = "!!! git pull error in %s" % location
        self.logger(self.xterm_titles, msg)
        writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
        return (exitcode, False)

    return (os.EX_OK, head_rev() != previous_rev)
def new(self, **kwargs):
    """Do the initial clone of the repository.

    Creates repo.location if needed, then clones the sync URI into it.

    @return: (exit status, True on success)
    @rtype: (int, bool)
    """
    if kwargs:
        self._kwargs(kwargs)
    # Fix: the old emerge_config/portdb lookups from self.options were
    # never used in this method and have been removed.
    try:
        if not os.path.exists(self.repo.location):
            os.makedirs(self.repo.location)
            self.logger(self.xterm_titles,
                'Created new directory %s' % self.repo.location)
    except IOError:
        return (1, False)
    msg = ">>> Cloning git repository from upstream into %s..." % self.repo.location
    self.logger(self.xterm_titles, msg)
    writemsg_level(msg + "\n")
    sync_uri = self.repo.sync_uri
    if sync_uri.startswith("file://"):
        # Strip "file:/" and keep one leading "/" of the path.
        sync_uri = sync_uri[6:]
    exitcode = portage.process.spawn_bash(
        "cd %s ; %s clone %s ." % (
            portage._shell_quote(self.repo.location),
            self.bin_command,
            portage._shell_quote(sync_uri)),
        **portage._native_kwargs(self.spawn_kwargs))
    if exitcode != os.EX_OK:
        msg = "!!! git clone error in %s" % self.repo.location
        self.logger(self.xterm_titles, msg)
        writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
        return (exitcode, False)
    msg = ">>> Git clone successful"
    self.logger(self.xterm_titles, msg)
    writemsg_level(msg + "\n")
    return (os.EX_OK, True)
def new(self, **kwargs):
    """Perform the initial git clone into repo.location.

    Honors PORTAGE_QUIET and the repo's sync-depth setting when
    building the clone command.  Returns ``(exit_status, success)``.
    """
    if kwargs:
        self._kwargs(kwargs)
    try:
        if not os.path.exists(self.repo.location):
            os.makedirs(self.repo.location)
            self.logger(self.xterm_titles,
                'Created new directory %s' % self.repo.location)
    except IOError:
        return (1, False)

    sync_uri = self.repo.sync_uri
    if sync_uri.startswith("file://"):
        # Strip "file:/" and keep one leading "/" of the path.
        sync_uri = sync_uri[6:]

    # Assemble optional clone flags.
    opts = ""
    if self.settings.get("PORTAGE_QUIET") == "1":
        opts += " --quiet"
    if self.repo.sync_depth is not None:
        opts += " --depth %d" % self.repo.sync_depth

    git_cmd = "%s clone%s %s ." % (self.bin_command, opts,
        portage._shell_quote(sync_uri))
    writemsg_level(git_cmd + "\n")

    exitcode = portage.process.spawn_bash(
        "cd %s ; exec %s" % (
            portage._shell_quote(self.repo.location), git_cmd),
        **portage._native_kwargs(self.spawn_kwargs))
    if exitcode == os.EX_OK:
        return (os.EX_OK, True)

    msg = "!!! git clone error in %s" % self.repo.location
    self.logger(self.xterm_titles, msg)
    writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
    return (exitcode, False)
def _sync(self):
    """
    Update existing git repository, and ignore the syncuri. We are
    going to trust the user and assume that the user is in the branch
    that he/she wants updated. We'll let the user manage branches with
    git directly.

    @return: (exit status, True on success)
    @rtype: (int, bool)
    """
    # No kwargs call here; this is internal, so it should have been
    # called by something which set the internal variables.
    # Fix: the emerge_config/portdb lookups from self.options were
    # never used in this method and have been removed.
    msg = ">>> Starting git pull in %s..." % self.repo.location
    self.logger(self.xterm_titles, msg)
    writemsg_level(msg + "\n")
    exitcode = portage.process.spawn_bash(
        "cd %s ; git pull" % (
            portage._shell_quote(self.repo.location),),
        **portage._native_kwargs(self.spawn_kwargs))
    if exitcode != os.EX_OK:
        msg = "!!! git pull error in %s" % self.repo.location
        self.logger(self.xterm_titles, msg)
        writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
        return (exitcode, False)
    msg = ">>> Git pull successful: %s" % self.repo.location
    self.logger(self.xterm_titles, msg)
    writemsg_level(msg + "\n")
    return (os.EX_OK, True)
def _init(experimental_inherit=False):
    """Populate the module-level tuple of constant LineCheck instances.

    When experimental inherit checking is off, _eclass_info is reset to
    emulate the historical eprefixify.defined and inherit.autotools
    checks before the check tuple is built.
    """
    global _constant_checks, _eclass_info
    if not experimental_inherit:
        # Emulate the old eprefixify.defined and inherit.autotools checks.
        _eclass_info = {
            'autotools': {
                'funcs': ('eaclocal', 'eautoconf', 'eautoheader',
                    'eautomake', 'eautoreconf', '_elibtoolize',
                    'eautopoint'),
                'comprehensive': True,
                'ignore_missing': True,
                'exempt_eclasses': ('git', 'git-2', 'subversion',
                    'autotools-utils'),
            },
            'prefix': {
                'funcs': ('eprefixify',),
                'comprehensive': False,
            },
        }
    # One instance per concrete LineCheck subclass in this module...
    class_checks = (v() for k, v in globals().items()
        if isinstance(v, type) and issubclass(v, LineCheck)
        and v not in _base_check_classes)
    # ...followed by one InheritEclass check per configured eclass.
    inherit_checks = (InheritEclass(k, **portage._native_kwargs(kwargs))
        for k, kwargs in _eclass_info.items())
    _constant_checks = tuple(chain(class_checks, inherit_checks))
def _init(experimental_inherit=False):
    """Build the tuple of constant LineCheck instances.

    Without experimental inherit support, _eclass_info is reset to the
    legacy eprefixify.defined / inherit.autotools configuration first.
    """
    global _constant_checks, _eclass_info

    if not experimental_inherit:
        # Emulate the old eprefixify.defined and inherit.autotools checks.
        _eclass_info = {
            "autotools": {
                "funcs": (
                    "eaclocal",
                    "eautoconf",
                    "eautoheader",
                    "eautomake",
                    "eautoreconf",
                    "_elibtoolize",
                    "eautopoint",
                ),
                "comprehensive": True,
                "ignore_missing": True,
                "exempt_eclasses": ("git", "git-2", "subversion", "autotools-utils"),
            },
            "prefix": {"funcs": ("eprefixify",), "comprehensive": False},
        }

    checks = []
    # Instantiate every concrete LineCheck subclass defined in this module.
    for _, candidate in globals().items():
        if (isinstance(candidate, type)
                and issubclass(candidate, LineCheck)
                and candidate not in _base_check_classes):
            checks.append(candidate())
    # Then append one InheritEclass check per configured eclass.
    for eclass, kwargs in _eclass_info.items():
        checks.append(InheritEclass(eclass, **portage._native_kwargs(kwargs)))
    _constant_checks = tuple(checks)
def _sync(self):
    """
    Internal function to sync an existing CVS repository

    @return: tuple of return code (0=success), whether the cache
        needs to be updated
    @rtype: (int, bool)
    """
    cvs_root = self.repo.sync_uri
    if cvs_root.startswith("cvs://"):
        cvs_root = cvs_root[6:]

    # cvs update
    msg = ">>> Starting cvs update with %s..." % self.repo.sync_uri
    self.logger(self.xterm_titles, msg)
    writemsg_level(msg + "\n")

    update_cmd = "cd %s; exec cvs -z0 -q update -dP" % (
        portage._shell_quote(self.repo.location),)
    exitcode = portage.process.spawn_bash(update_cmd,
        **portage._native_kwargs(self.spawn_kwargs))
    if exitcode != os.EX_OK:
        msg = "!!! cvs update error; exiting."
        self.logger(self.xterm_titles, msg)
        writemsg_level(msg + "\n", noiselevel=-1, level=logging.ERROR)
    return (exitcode, False)
def _init(experimental_inherit=False):
    """Assemble the module's constant LineCheck tuple."""
    global _constant_checks, _eclass_info
    if not experimental_inherit:
        # Emulate the old eprefixify.defined and inherit.autotools checks.
        _eclass_info = {
            'autotools': {
                'funcs': (
                    'eaclocal', 'eautoconf', 'eautoheader',
                    'eautomake', 'eautoreconf', '_elibtoolize',
                    'eautopoint'
                ),
                'comprehensive': True,
                'ignore_missing': True,
                'exempt_eclasses': ('git', 'git-2', 'subversion',
                    'autotools-utils')
            },
            'prefix': {
                'funcs': ('eprefixify', ),
                'comprehensive': False
            }
        }

    def _iter_checks():
        # First: one instance of each concrete LineCheck subclass
        # defined in this module.
        for _, value in globals().items():
            if (isinstance(value, type) and issubclass(value, LineCheck)
                    and value not in _base_check_classes):
                yield value()
        # Second: one InheritEclass check per configured eclass.
        for eclass, kwargs in _eclass_info.items():
            yield InheritEclass(eclass, **portage._native_kwargs(kwargs))

    _constant_checks = tuple(_iter_checks())
def new(self, **kwargs):
    """Perform the initial CVS checkout for this repository."""
    if kwargs:
        self._kwargs(kwargs)
    # initial checkout
    msg = ">>> Starting initial cvs checkout with %s..." % self.repo.sync_uri
    self.logger(self.xterm_titles, msg)
    writemsg_level(msg + "\n")
    # The target must not already exist: an empty directory is removed
    # so cvs can create it; anything else present is an error.
    try:
        os.rmdir(self.repo.location)
    except OSError as e:
        if e.errno != errno.ENOENT:
            msg = "!!! existing '%s' directory; exiting." % self.repo.location
            self.logger(self.xterm_titles, msg)
            writemsg_level(msg + "\n", noiselevel=-1, level=logging.ERROR)
            return (1, False)
        del e
    cvs_root = self.repo.sync_uri
    checkout_cmd = "cd %s; exec cvs -z0 -d %s co -P -d %s %s" % (
        portage._shell_quote(os.path.dirname(self.repo.location)),
        portage._shell_quote(cvs_root),
        portage._shell_quote(os.path.basename(self.repo.location)),
        portage._shell_quote(self.repo.sync_cvs_repo))
    exitcode = portage.process.spawn_bash(checkout_cmd,
        **portage._native_kwargs(self.spawn_kwargs))
    if exitcode != os.EX_OK:
        msg = "!!! cvs checkout error; exiting."
        self.logger(self.xterm_titles, msg)
        writemsg_level(msg + "\n", noiselevel=-1, level=logging.ERROR)
        return (1, False)
    return (0, False)
def new(self, **kwargs):
    """Do the initial git clone, honoring the repo's sync-depth."""
    if kwargs:
        self._kwargs(kwargs)
    try:
        if not os.path.exists(self.repo.location):
            os.makedirs(self.repo.location)
            self.logger(self.xterm_titles,
                'Created new directory %s' % self.repo.location)
    except IOError:
        return (1, False)

    sync_uri = self.repo.sync_uri
    if sync_uri.startswith("file://"):
        # Strip "file:/" and keep one leading "/" of the path.
        sync_uri = sync_uri[6:]

    depth_arg = ''
    if self.repo.sync_depth is not None:
        depth_arg = '--depth %d ' % self.repo.sync_depth

    git_cmd = "%s clone %s%s ." % (self.bin_command, depth_arg,
        portage._shell_quote(sync_uri))
    writemsg_level(git_cmd + "\n")

    exitcode = portage.process.spawn_bash(
        "cd %s ; exec %s" % (
            portage._shell_quote(self.repo.location), git_cmd),
        **portage._native_kwargs(self.spawn_kwargs))
    if exitcode == os.EX_OK:
        return (os.EX_OK, True)

    msg = "!!! git clone error in %s" % self.repo.location
    self.logger(self.xterm_titles, msg)
    writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
    return (exitcode, False)
def load_manifest(self, *args, **kwds):
    """Construct a Manifest, forcing this repo's manifest policy.

    Caller-supplied keyword arguments are overridden by the repository
    configuration (thin manifests, allow-missing, hashes, etc.).
    """
    kwds.update(
        thin=self.thin_manifest,
        allow_missing=self.allow_missing_manifest,
        allow_create=self.create_manifest,
        hashes=self.manifest_hashes,
    )
    if self.disable_manifest:
        # A disabled manifest behaves as if no entries exist on disk.
        kwds['from_scratch'] = True
    kwds['find_invalid_path_char'] = self.find_invalid_path_char
    return manifest.Manifest(*args, **portage._native_kwargs(kwds))
def load_manifest(self, *args, **kwds):
    """Create a Manifest for this repository, applying repo policy."""
    policy = {
        "thin": self.thin_manifest,
        "allow_missing": self.allow_missing_manifest,
        "allow_create": self.create_manifest,
        "hashes": self.manifest_hashes,
    }
    kwds.update(policy)
    if self.disable_manifest:
        # With manifests disabled, start from an empty state.
        kwds["from_scratch"] = True
    kwds["find_invalid_path_char"] = self.find_invalid_path_char
    return manifest.Manifest(*args, **portage._native_kwargs(kwds))
def new(self, **kwargs):
    """Perform the initial svn checkout into repo.location."""
    if kwargs:
        self._kwargs(kwargs)
    #initial checkout
    svn_root = self.repo.sync_uri
    checkout_cmd = "cd %s; exec svn co %s ." % (
        portage._shell_quote(self.repo.location),
        portage._shell_quote(svn_root))
    exitcode = portage.process.spawn_bash(checkout_cmd,
        **portage._native_kwargs(self.spawn_kwargs))
    if exitcode != os.EX_OK:
        msg = "!!! svn checkout error; exiting."
        self.logger(self.xterm_titles, msg)
        writemsg_level(msg + "\n", noiselevel=-1, level=logging.ERROR)
    return (exitcode, False)
def exists(self, **kwargs):
    """Return True when repo.location holds a usable git work tree."""
    if kwargs:
        self._kwargs(kwargs)
    elif not self.repo:
        return False
    if not os.path.exists(self.repo.location):
        return False
    # git rev-parse exits with status 128 when the directory is not
    # inside a git repository.
    exitcode = portage.process.spawn_bash(
        "cd %s ; git rev-parse" % (
            portage._shell_quote(self.repo.location),),
        **portage._native_kwargs(self.spawn_kwargs))
    return exitcode != 128
def new(self, **kwargs):
    """Check the repository out of svn for the first time."""
    if kwargs:
        self._kwargs(kwargs)
    #initial checkout
    svn_root = self.repo.sync_uri
    exitcode = portage.process.spawn_bash(
        "cd %s; exec svn co %s ." % (
            portage._shell_quote(self.repo.location),
            portage._shell_quote(svn_root)),
        **portage._native_kwargs(self.spawn_kwargs))
    if exitcode != os.EX_OK:
        # Report the failure; the exit status is returned either way.
        msg = "!!! svn checkout error; exiting."
        self.logger(self.xterm_titles, msg)
        writemsg_level(msg + "\n", noiselevel=-1, level=logging.ERROR)
    return (exitcode, False)
def new(self, **kwargs):
    """Perform the initial CVS checkout of the configured module."""
    if kwargs:
        self._kwargs(kwargs)
    #initial checkout
    cvs_root = self.repo.sync_uri
    checkout_cmd = "cd %s; exec cvs -z0 -d %s co -P -d %s %s" % (
        portage._shell_quote(os.path.dirname(self.repo.location)),
        portage._shell_quote(cvs_root),
        portage._shell_quote(os.path.basename(self.repo.location)),
        portage._shell_quote(
            self.repo.module_specific_options["sync-cvs-repo"]))
    exitcode = portage.process.spawn_bash(checkout_cmd,
        **portage._native_kwargs(self.spawn_kwargs))
    if exitcode != os.EX_OK:
        msg = "!!! cvs checkout error; exiting."
        self.logger(self.xterm_titles, msg)
        writemsg_level(msg + "\n", noiselevel=-1, level=logging.ERROR)
        return (1, False)
    return (0, False)
def new(self, **kwargs):
    """Do the first-time CVS checkout for this repository."""
    if kwargs:
        self._kwargs(kwargs)
    #initial checkout
    cvs_root = self.repo.sync_uri
    exitcode = portage.process.spawn_bash(
        "cd %s; exec cvs -z0 -d %s co -P -d %s %s" % (
            portage._shell_quote(os.path.dirname(self.repo.location)),
            portage._shell_quote(cvs_root),
            portage._shell_quote(os.path.basename(self.repo.location)),
            portage._shell_quote(self.repo.sync_cvs_repo)),
        **portage._native_kwargs(self.spawn_kwargs))
    if exitcode != os.EX_OK:
        msg = "!!! cvs checkout error; exiting."
        self.logger(self.xterm_titles, msg)
        writemsg_level(msg + "\n", noiselevel=-1, level=logging.ERROR)
        return (1, False)
    return (0, False)
def _svn_upgrade(self):
    """Run ``svn upgrade`` on the repo's working copy.

    @return: the spawn exit status (0 on success)
    @rtype: int
    """
    upgrade_cmd = "cd %s; exec svn upgrade" % (
        portage._shell_quote(self.repo.location),)
    exitcode = portage.process.spawn_bash(upgrade_cmd,
        **portage._native_kwargs(self.spawn_kwargs))
    if exitcode != os.EX_OK:
        msg = "!!! svn upgrade error; exiting."
        self.logger(self.xterm_titles, msg)
        writemsg_level(msg + "\n", noiselevel=-1, level=logging.ERROR)
    return exitcode
def _svn_upgrade(self):
    """Upgrade the svn working-copy format in place.

    @return: the spawn exit status (0 on success)
    @rtype: int
    """
    exitcode = portage.process.spawn_bash(
        "cd %s; exec svn upgrade" % (
            portage._shell_quote(self.repo.location),),
        **portage._native_kwargs(self.spawn_kwargs))
    if exitcode != os.EX_OK:
        # Log the failure; the raw status is still handed back.
        msg = "!!! svn upgrade error; exiting."
        self.logger(self.xterm_titles, msg)
        writemsg_level(msg + "\n", noiselevel=-1, level=logging.ERROR)
    return exitcode
def sync(self, **kwargs):
    """Sync the repository via the emerge-webrsync helper script."""
    if kwargs:
        self._kwargs(kwargs)

    if not self.has_bin:
        return (1, False)

    exitcode = portage.process.spawn_bash("%s" % (self.bin_command),
        **portage._native_kwargs(self.spawn_kwargs))
    if exitcode == os.EX_OK:
        return (exitcode, True)

    msg = "!!! emerge-webrsync error in %s" % self.repo.location
    self.logger(self.xterm_titles, msg)
    writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
    return (exitcode, False)
def _spawn(self, args, **kwargs):
    """Spawn *args* joined into a single shell command.

    Exports PORTAGE_PIPE_FD when a dummy pipe fd is set, enables
    fakeroot when the feature is active, and hides EBUILD_PHASE for
    the duration of the call.
    """
    if self._dummy_pipe_fd is not None:
        self.settings["PORTAGE_PIPE_FD"] = str(self._dummy_pipe_fd)
    if "fakeroot" in self.settings.features:
        kwargs["fakeroot"] = True

    # Temporarily unset EBUILD_PHASE so that bashrc code doesn't
    # think this is a real phase.
    saved_phase = self.settings.pop("EBUILD_PHASE", None)
    try:
        return spawn(" ".join(args), self.settings,
            **portage._native_kwargs(kwargs))
    finally:
        # Restore the environment regardless of how spawn exits.
        if saved_phase is not None:
            self.settings["EBUILD_PHASE"] = saved_phase
        self.settings.pop("PORTAGE_PIPE_FD", None)
def update(self):
    """Sync an existing layman-managed overlay.

    Falls back to installing the overlay from scratch via new() when
    the sync itself fails.  Returns ``(exit_status, success)``.
    """
    args = []
    msg = '>>> Starting layman sync for %(repo)s...' % ({'repo': self.repo.name})
    self.logger(self.xterm_titles, msg)
    writemsg_level(msg + '\n')

    location = self.repo.location.replace(self.repo.name, '')
    args.append('layman')
    self._get_optargs(args)
    args.extend(['--storage', location, '-s', self.repo.name])
    command = ' '.join(args)

    exitcode = portage.process.spawn_bash(
        "%(command)s" % ({'command': command}),
        **portage._native_kwargs(self.spawn_kwargs))
    if exitcode != os.EX_OK:
        # Sync failed: try a fresh add instead.
        exitcode = self.new()[0]
        if exitcode != os.EX_OK:
            msg = "!!! layman sync error in %(repo)s" % ({'repo': self.repo.name})
            self.logger(self.xterm_titles, msg)
            writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
            return (exitcode, False)
        else:
            return (exitcode, True)

    msg = ">>> layman sync succeeded: %(repo)s" % ({'repo': self.repo.name})
    self.logger(self.xterm_titles, msg)
    writemsg_level(msg + "\n")
    msg = '>>> laymansync sez... "Hasta la sync ya, baby!"'
    self.logger(self.xterm_titles, msg)
    writemsg_level(msg + "\n")
    return (exitcode, True)
def update(self):
    """
    Internal function to update an existing CVS repository

    @return: tuple of return code (0=success), whether the cache
        needs to be updated
    @rtype: (int, bool)
    """
    #cvs update
    update_cmd = "cd %s; exec cvs -z0 -q update -dP" % (
        portage._shell_quote(self.repo.location),)
    exitcode = portage.process.spawn_bash(update_cmd,
        **portage._native_kwargs(self.spawn_kwargs))
    if exitcode != os.EX_OK:
        msg = "!!! cvs update error; exiting."
        self.logger(self.xterm_titles, msg)
        writemsg_level(msg + "\n", noiselevel=-1, level=logging.ERROR)
    return (exitcode, False)
def update(self):
    """Run ``git pull`` in the existing checkout.

    The sync URI is ignored: the branch currently checked out by the
    user is the one updated, and branch management stays with git.
    Returns ``(exit_status, success)``.
    """
    pull_cmd = "%s pull" % self.bin_command
    writemsg_level(pull_cmd + "\n")

    exitcode = portage.process.spawn_bash(
        "cd %s ; exec %s" % (
            portage._shell_quote(self.repo.location), pull_cmd),
        **portage._native_kwargs(self.spawn_kwargs))
    if exitcode == os.EX_OK:
        return (os.EX_OK, True)

    msg = "!!! git pull error in %s" % self.repo.location
    self.logger(self.xterm_titles, msg)
    writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
    return (exitcode, False)
def sync(self, **kwargs):
    """Sync the repository with emerge-webrsync.

    Drops the uid/gid/groups spawn kwargs first so the helper can run
    gpg without permission errors.
    """
    if kwargs:
        self._kwargs(kwargs)

    if not self.has_bin:
        return (1, False)

    # filter these out to prevent gpg errors
    for var in ('uid', 'gid', 'groups'):
        self.spawn_kwargs.pop(var, None)

    exitcode = portage.process.spawn_bash("%s" % (self.bin_command),
        **portage._native_kwargs(self.spawn_kwargs))
    if exitcode == os.EX_OK:
        return (exitcode, True)

    msg = "!!! emerge-webrsync error in %s" % self.repo.location
    self.logger(self.xterm_titles, msg)
    writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
    return (exitcode, False)
def _sync(self):
    """
    Update existing repository

    @return: (exit status, True on success)
    @rtype: (int, bool)
    """
    # Fix: the emerge_config/portdb lookups from self.options were
    # never used in this method (and the stale commented-out
    # post_sync call they served) — both have been removed.
    msg = ">>> Starting emerge-webrsync for %s..." % self.repo.location
    self.logger(self.xterm_titles, msg)
    writemsg_level(msg + "\n")
    exitcode = portage.process.spawn_bash("%s" % (self.bin_command),
        **portage._native_kwargs(self.spawn_kwargs))
    if exitcode != os.EX_OK:
        msg = "!!! emerge-webrsync error in %s" % self.repo.location
        self.logger(self.xterm_titles, msg)
        writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
        return (exitcode, False)
    msg = ">>> Emerge-webrsync successful: %s" % self.repo.location
    self.logger(self.xterm_titles, msg)
    writemsg_level(msg + "\n")
    return (exitcode, True)
def new(self, **kwargs):
    """Use layman to install the repository for the first time."""
    if kwargs:
        self._kwargs(kwargs)
    args = []
    msg = '>>> Starting to add new layman overlay %(repo)s' % ({'repo': self.repo.name})
    self.logger(self.xterm_titles, msg)
    writemsg_level(msg + '\n')

    location = self.repo.location.replace(self.repo.name, '')
    args.append('layman')
    self._get_optargs(args)
    args.extend(['--storage', location, '-a', self.repo.name])
    command = ' '.join(args)

    exitcode = portage.process.spawn_bash(
        "%(command)s" % ({'command': command}),
        **portage._native_kwargs(self.spawn_kwargs))
    if exitcode != os.EX_OK:
        msg = "!!! layman add error in %(repo)s" % ({'repo': self.repo.name})
        self.logger(self.xterm_titles, msg)
        writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
        return (exitcode, False)

    msg = ">>> Addition of layman repo succeeded: %(repo)s" % ({'repo': self.repo.name})
    self.logger(self.xterm_titles, msg)
    writemsg_level(msg + "\n")
    msg = '>>> laymansync sez... "Hasta la add ya, baby!"'
    self.logger(self.xterm_titles, msg)
    writemsg_level(msg + "\n")
    return (exitcode, True)
def new(self, **kwargs):
    """Clone the remote repository into repo.location (first sync).

    Builds the clone flags from PORTAGE_QUIET, the repo's sync-depth,
    and any sync-git-clone-extra-opts from repos.conf.
    """
    if kwargs:
        self._kwargs(kwargs)
    try:
        if not os.path.exists(self.repo.location):
            os.makedirs(self.repo.location)
            self.logger(self.xterm_titles,
                'Created new directory %s' % self.repo.location)
    except IOError:
        return (1, False)

    sync_uri = self.repo.sync_uri
    if sync_uri.startswith("file://"):
        # Strip "file:/" and keep one leading "/" of the path.
        sync_uri = sync_uri[6:]

    opts = ""
    if self.settings.get("PORTAGE_QUIET") == "1":
        opts += " --quiet"
    if self.repo.sync_depth is not None:
        opts += " --depth %d" % self.repo.sync_depth
    if self.repo.module_specific_options.get('sync-git-clone-extra-opts'):
        opts += " %s" % self.repo.module_specific_options[
            'sync-git-clone-extra-opts']

    git_cmd = "%s clone%s %s ." % (self.bin_command, opts,
        portage._shell_quote(sync_uri))
    writemsg_level(git_cmd + "\n")

    exitcode = portage.process.spawn_bash(
        "cd %s ; exec %s" % (
            portage._shell_quote(self.repo.location), git_cmd),
        **portage._native_kwargs(self.spawn_kwargs))
    if exitcode == os.EX_OK:
        return (os.EX_OK, True)

    msg = "!!! git clone error in %s" % self.repo.location
    self.logger(self.xterm_titles, msg)
    writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
    return (exitcode, False)
def _sync(self):
    """ Update existing repository

    Runs a layman sync; on failure it attempts to (re)create the
    overlay before reporting the error.

    @return: (exit status, True on success)
    @rtype: (int, bool)
    """
    # Fix: the unused emerge_config/portdb lookups and the unused
    # 'overlay' binding were removed; create_overlay() is still
    # called for its side effect.
    args = []
    msg = '>>> Starting layman sync for %(repo)s...' % ({'repo': self.repo.name})
    self.logger(self.xterm_titles, msg)
    writemsg_level(msg + '\n')

    location = self.repo.location.replace(self.repo.name, '')
    args.append('layman')
    self._get_optargs(args)
    args.append('--storage')
    args.append(location)
    args.append('-s')
    args.append(self.repo.name)
    command = ' '.join(args)

    exitcode = portage.process.spawn_bash(
        "%(command)s" % ({'command': command}),
        **portage._native_kwargs(self.spawn_kwargs))
    if exitcode != os.EX_OK:
        msg = "!!! layman sync error in %(repo)s" % ({'repo': self.repo.name})
        self.logger(self.xterm_titles, msg)
        writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
        # Recovery attempt: rebuild the overlay from scratch.
        create_overlay(repo=self.repo, logger=self.logger,
            xterm_titles=self.xterm_titles)
        return (exitcode, False)

    msg = ">>> layman sync succeeded: %(repo)s" % ({'repo': self.repo.name})
    self.logger(self.xterm_titles, msg)
    writemsg_level(msg + "\n")
    return (exitcode, True)
def verbose_size(self, pkg, repoadd_set, pkg_info):
    """Determines the size of the downloads required

    @param pkg: _emerge.Package.Package instance
    @param repoadd_set: set of repos to add
    @param pkg_info: dictionary
    Modifies class globals: self.myfetchlist, self.counters.totalsize,
        self.verboseadd, repoadd_set.
    """
    mysize = 0
    # Only packages that will actually be merged contribute fetch size.
    if pkg.type_name in ("binary", "ebuild") and pkg_info.merge:
        db = pkg.root_config.trees[
            pkg.root_config.pkg_tree_map[pkg.type_name]].dbapi
        kwargs = {}
        if pkg.type_name == "ebuild":
            # Fetch sizes for ebuilds depend on USE and the repo.
            kwargs["useflags"] = pkg_info.use
            kwargs["myrepo"] = pkg.repo
        myfilesdict = None
        try:
            myfilesdict = db.getfetchsizes(pkg.cpv,
                **portage._native_kwargs(kwargs))
        except InvalidDependString as e:
            # FIXME: validate SRC_URI earlier
            depstr, = db.aux_get(pkg.cpv, ["SRC_URI"],
                myrepo=pkg.repo)
            show_invalid_depstring_notice(
                pkg, depstr, _unicode(e))
            raise
        except SignatureException:
            # missing/invalid binary package SIZE signature
            pass
        if myfilesdict is None:
            # Placeholder string shown instead of a size.
            myfilesdict = "[empty/missing/bad digest]"
        else:
            # Count each distfile only once across the whole display.
            for myfetchfile in myfilesdict:
                if myfetchfile not in self.myfetchlist:
                    mysize += myfilesdict[myfetchfile]
                    self.myfetchlist.add(myfetchfile)
            if pkg_info.ordered:
                self.counters.totalsize += mysize
        self.verboseadd += _format_size(mysize)

    if self.quiet_repo_display:
        # overlay verbose
        # assign index for a previous version in the same slot
        if pkg_info.previous_pkg is not None:
            repo_name_prev = pkg_info.previous_pkg.repo
        else:
            repo_name_prev = None

        # now use the data to generate output
        if pkg.installed or pkg_info.previous_pkg is None:
            self.repoadd = self.conf.repo_display.repoStr(
                pkg_info.repo_path_real)
        else:
            repo_path_prev = None
            if repo_name_prev:
                repo_path_prev = self.portdb.getRepositoryPath(
                    repo_name_prev)
            # Show "old=>new" only when the repo actually changes.
            if repo_path_prev == pkg_info.repo_path_real:
                self.repoadd = self.conf.repo_display.repoStr(
                    pkg_info.repo_path_real)
            else:
                self.repoadd = "%s=>%s" % (
                    self.conf.repo_display.repoStr(repo_path_prev),
                    self.conf.repo_display.repoStr(
                        pkg_info.repo_path_real))
        if self.repoadd:
            repoadd_set.add(self.repoadd)
def _do_rsync(self, syncuri, timestamp, opts):
    """Probe the server timestamp and, if needed, rsync the repository.

    @param syncuri: the rsync URI to sync from
    @param timestamp: local timestamp (0 when unknown)
    @param opts: emerge options mapping
    @return: (is_synced, exitcode)

    Fix: the final exit-code check previously repeated the same
    membership test in an unreachable ``elif`` branch (its list was a
    subset of the preceding ``if`` list); the dead branch was removed.
    """
    is_synced = False
    if timestamp != 0 and "--quiet" not in opts:
        print(">>> Checking server timestamp ...")

    rsynccommand = [self.bin_command] + self.rsync_opts + self.extra_rsync_opts

    if self.proto == 'ssh' and self.ssh_opts:
        rsynccommand.append("--rsh=ssh " + self.ssh_opts)

    if "--debug" in opts:
        print(rsynccommand)

    exitcode = os.EX_OK
    servertimestamp = 0

    # Even if there's no timestamp available locally, fetch the
    # timestamp anyway as an initial probe to verify that the server is
    # responsive.  This protects us from hanging indefinitely on a
    # connection attempt to an unresponsive server which rsync's
    # --timeout option does not prevent.

    # Temporary file for remote server timestamp comparison.
    # NOTE: If FEATURES=usersync is enabled then the tempfile
    # needs to be in a directory that's readable by the usersync
    # user. We assume that PORTAGE_TMPDIR will satisfy this
    # requirement, since that's not necessarily true for the
    # default directory used by the tempfile module.
    if self.usersync_uid is not None:
        tmpdir = self.settings['PORTAGE_TMPDIR']
    else:
        # use default dir from tempfile module
        tmpdir = None
    fd, tmpservertimestampfile = tempfile.mkstemp(dir=tmpdir)
    os.close(fd)
    if self.usersync_uid is not None:
        portage.util.apply_permissions(tmpservertimestampfile,
            uid=self.usersync_uid)
    command = rsynccommand[:]
    command.append(syncuri.rstrip("/") + "/metadata/timestamp.chk")
    command.append(tmpservertimestampfile)
    content = None
    pids = []
    try:
        # Timeout here in case the server is unresponsive. The
        # --timeout rsync option doesn't apply to the initial
        # connection attempt.
        try:
            if self.rsync_initial_timeout:
                portage.exception.AlarmSignal.register(
                    self.rsync_initial_timeout)

            pids.extend(portage.process.spawn(command,
                returnpid=True,
                **portage._native_kwargs(self.spawn_kwargs)))
            exitcode = os.waitpid(pids[0], 0)[1]
            if self.usersync_uid is not None:
                portage.util.apply_permissions(tmpservertimestampfile,
                    uid=os.getuid())
            content = portage.grabfile(tmpservertimestampfile)
        finally:
            if self.rsync_initial_timeout:
                portage.exception.AlarmSignal.unregister()
            try:
                os.unlink(tmpservertimestampfile)
            except OSError:
                pass
    except portage.exception.AlarmSignal:
        # timed out
        print('timed out')
        # With waitpid and WNOHANG, only check the
        # first element of the tuple since the second
        # element may vary (bug #337465).
        if pids and os.waitpid(pids[0], os.WNOHANG)[0] == 0:
            os.kill(pids[0], signal.SIGTERM)
            os.waitpid(pids[0], 0)
        # This is the same code rsync uses for timeout.
        exitcode = 30
    else:
        if exitcode != os.EX_OK:
            # Normalize the raw waitpid status into an exit code.
            if exitcode & 0xff:
                exitcode = (exitcode & 0xff) << 8
            else:
                exitcode = exitcode >> 8

    if content:
        try:
            servertimestamp = time.mktime(time.strptime(
                content[0], TIMESTAMP_FORMAT))
        except (OverflowError, ValueError):
            pass
    del command, pids, content

    if exitcode == os.EX_OK:
        if (servertimestamp != 0) and (servertimestamp == timestamp):
            is_synced = True
            self.logger(self.xterm_titles,
                ">>> Cancelling sync -- Already current.")
            print()
            print(">>>")
            print(">>> Timestamps on the server and in the local repository are the same.")
            print(">>> Cancelling all further sync action. You are already up to date.")
            print(">>>")
            print(">>> In order to force sync, remove '%s'." % self.servertimestampfile)
            print(">>>")
            print()
            return is_synced, exitcode
        elif (servertimestamp != 0) and (servertimestamp < timestamp):
            self.logger(self.xterm_titles,
                ">>> Server out of date: %s" % syncuri)
            print()
            print(">>>")
            print(">>> SERVER OUT OF DATE: %s" % syncuri)
            print(">>>")
            print(">>> In order to force sync, remove '%s'." % self.servertimestampfile)
            print(">>>")
            print()
            exitcode = SERVER_OUT_OF_DATE
        elif (servertimestamp == 0) or (servertimestamp > timestamp):
            # actual sync
            command = rsynccommand[:]

            submodule_paths = self._get_submodule_paths()
            if submodule_paths:
                # The only way to select multiple directories to
                # sync, without calling rsync multiple times, is
                # to use --relative.
                command.append("--relative")
                for path in submodule_paths:
                    # /./ is special syntax supported with the
                    # rsync --relative option.
                    command.append(syncuri + "/./" + path)
                command.append(self.repo.location)
            else:
                command.extend([syncuri + "/", self.repo.location])

            exitcode = None
            try:
                exitcode = portage.process.spawn(command,
                    **portage._native_kwargs(self.spawn_kwargs))
            finally:
                if exitcode is None:
                    # interrupted
                    exitcode = 128 + signal.SIGINT

            # 0 Success
            # 1 Syntax or usage error
            # 2 Protocol incompatibility
            # 5 Error starting client-server protocol
            # 35 Timeout waiting for daemon connection
            if exitcode not in (0, 1, 2, 5, 35):
                # If the exit code is not among those listed above,
                # then we may have a partial/inconsistent sync
                # state, so our previously read timestamp as well
                # as the corresponding file can no longer be
                # trusted.
                timestamp = 0
                try:
                    os.unlink(self.servertimestampfile)
                except OSError:
                    pass

    # These rsync exit codes leave the tree in a usable state, so
    # treat the sync as completed.
    if exitcode in [0, 1, 3, 4, 11, 14, 20, 21]:
        is_synced = True
    else:
        # Code 2 indicates protocol incompatibility, which is expected
        # for servers with protocol < 29 that don't support
        # --prune-empty-directories. Retry for a server that supports
        # at least rsync protocol version 29 (>=rsync-2.6.4).
        pass
    return is_synced, exitcode
def _parse(paths, prepos, ignored_map, ignored_location_map, local_config, portdir):
    """Parse files in paths to load config

    Reads repos.conf-style files (paths may mix filesystem paths and
    io.StringIO objects) into a ConfigParser, then builds a RepoConfig
    for the DEFAULT section and for each named section into *prepos*.
    """
    parser = SafeConfigParser()

    # use read_file/readfp in order to control decoding of unicode
    try:
        # Python >=3.2
        read_file = parser.read_file
        source_kwarg = 'source'
    except AttributeError:
        # Older Pythons only have the deprecated readfp().
        read_file = parser.readfp
        source_kwarg = 'filename'

    # Expand directory paths into the files they contain; pass
    # non-string entries (e.g. StringIO) through untouched.
    recursive_paths = []
    for p in paths:
        if isinstance(p, basestring):
            recursive_paths.extend(_recursive_file_list(p))
        else:
            recursive_paths.append(p)

    for p in recursive_paths:
        if isinstance(p, basestring):
            f = None
            try:
                f = io.open(_unicode_encode(p,
                    encoding=_encodings['fs'], errors='strict'),
                    mode='r', encoding=_encodings['repo.content'],
                    errors='replace')
            except EnvironmentError:
                # Unreadable/missing files are silently skipped.
                pass
            else:
                # The 'source' keyword argument is needed since otherwise
                # ConfigParser in Python <3.3.3 may throw a TypeError
                # because it assumes that f.name is a native string rather
                # than binary when constructing error messages.
                kwargs = {source_kwarg: p}
                read_file(f, **portage._native_kwargs(kwargs))
            finally:
                if f is not None:
                    f.close()
        elif isinstance(p, io.StringIO):
            kwargs = {source_kwarg: "<io.StringIO>"}
            read_file(p, **portage._native_kwargs(kwargs))
        else:
            raise TypeError("Unsupported type %r of element %r of 'paths' argument" % (type(p), p))

    prepos['DEFAULT'] = RepoConfig("DEFAULT",
        parser.defaults(), local_config=local_config)

    for sname in parser.sections():
        optdict = {}
        for oname in parser.options(sname):
            optdict[oname] = parser.get(sname, oname)

        repo = RepoConfig(sname, optdict, local_config=local_config)

        # Perform repos.conf sync variable validation
        portage.sync.validate_config(repo, logging)

        # For backward compatibility with locations set via PORTDIR and
        # PORTDIR_OVERLAY, delay validation of the location and repo.name
        # until after PORTDIR and PORTDIR_OVERLAY have been processed.
        prepos[sname] = repo
def __init__(self, _unused_param=DeprecationWarning, mysettings=None):
	"""
	Initialize the ebuild-repository dbapi.

	@param _unused_param: deprecated, use mysettings['PORTDIR'] instead
	@type _unused_param: None
	@param mysettings: an immutable config instance
	@type mysettings: portage.config
	"""

	from portage import config
	if mysettings:
		self.settings = mysettings
	else:
		# Fall back to a clone of the global settings instance.
		from portage import settings
		self.settings = config(clone=settings)

	if _unused_param is not DeprecationWarning:
		warnings.warn("The first parameter of the " + \
			"portage.dbapi.porttree.portdbapi" + \
			" constructor is unused since portage-2.1.8. " + \
			"mysettings['PORTDIR'] is used instead.",
			DeprecationWarning, stacklevel=2)

	self.repositories = self.settings.repositories
	self.treemap = self.repositories.treemap

	# This is strictly for use in aux_get() doebuild calls when metadata
	# is generated by the depend phase.  It's safest to use a clone for
	# this purpose because doebuild makes many changes to the config
	# instance that is passed in.
	self.doebuild_settings = config(clone=self.settings)
	self.depcachedir = os.path.realpath(self.settings.depcachedir)

	if os.environ.get("SANDBOX_ON") == "1":
		# Make api consumers exempt from sandbox violations
		# when doing metadata cache updates.
		sandbox_write = os.environ.get("SANDBOX_WRITE", "").split(":")
		if self.depcachedir not in sandbox_write:
			sandbox_write.append(self.depcachedir)
			os.environ["SANDBOX_WRITE"] = \
				":".join(filter(None, sandbox_write))

	self.porttrees = list(self.settings.repositories.repoLocationList())

	# This is used as sanity check for aux_get(). If there is no
	# root eclass dir, we assume that PORTDIR is invalid or
	# missing. This check allows aux_get() to detect a missing
	# portage tree and return early by raising a KeyError.
	self._have_root_eclass_dir = os.path.isdir(
		os.path.join(self.settings.repositories.mainRepoLocation(), "eclass"))

	#if the portdbapi is "frozen", then we assume that we can cache everything (that no updates to it are happening)
	self.xcache = {}
	self.frozen = 0

	#Keep a list of repo names, sorted by priority (highest priority first).
	self._ordered_repo_name_list = tuple(reversed(self.repositories.prepos_order))

	self.auxdbmodule = self.settings.load_best_module("portdbapi.auxdbmodule")
	self.auxdb = {}
	self._pregen_auxdb = {}
	# If the current user doesn't have depcachedir write permission,
	# then the depcachedir cache is kept here read-only access.
	self._ro_auxdb = {}
	self._init_cache_dirs()
	# Probe depcachedir ownership/writability; a failed stat simply
	# means we treat the cache dir as unwritable below.
	try:
		depcachedir_st = os.stat(self.depcachedir)
		depcachedir_w_ok = os.access(self.depcachedir, os.W_OK)
	except OSError:
		depcachedir_st = None
		depcachedir_w_ok = False

	cache_kwargs = {}

	depcachedir_unshared = False
	if portage.data.secpass < 1 and \
		depcachedir_w_ok and \
		depcachedir_st is not None and \
		os.getuid() == depcachedir_st.st_uid and \
		os.getgid() == depcachedir_st.st_gid:
		# If this user owns depcachedir and is not in the
		# portage group, then don't bother to set permissions
		# on cache entries. This makes it possible to run
		# egencache without any need to be a member of the
		# portage group.
		depcachedir_unshared = True
	else:
		cache_kwargs.update(portage._native_kwargs({
			'gid'     : portage_gid,
			'perms'   : 0o664
		}))

	# If secpass < 1, we don't want to write to the cache
	# since then we won't be able to apply group permissions
	# to the cache entries/directories.
	if (secpass < 1 and not depcachedir_unshared) or not depcachedir_w_ok:
		# Unprivileged or unwritable cache dir: use an in-memory
		# (volatile) cache, with a read-only view of any existing
		# on-disk cache when it can be opened.
		for x in self.porttrees:
			self.auxdb[x] = volatile.database(
				self.depcachedir, x, self._known_keys,
				**cache_kwargs)
			try:
				self._ro_auxdb[x] = self.auxdbmodule(self.depcachedir, x,
					self._known_keys, readonly=True, **cache_kwargs)
			except CacheError:
				pass
	else:
		for x in self.porttrees:
			if x in self.auxdb:
				continue
			# location, label, auxdbkeys
			self.auxdb[x] = self.auxdbmodule(
				self.depcachedir, x, self._known_keys, **cache_kwargs)
	if "metadata-transfer" not in self.settings.features:
		# Use pre-generated metadata caches shipped with each tree,
		# when available.
		for x in self.porttrees:
			if x in self._pregen_auxdb:
				continue
			cache = self._create_pregen_cache(x)
			if cache is not None:
				self._pregen_auxdb[x] = cache
	# Selectively cache metadata in order to optimize dep matching.
	self._aux_cache_keys = set(
		["DEPEND", "EAPI", "HDEPEND",
		"INHERITED", "IUSE", "KEYWORDS", "LICENSE",
		"PDEPEND", "PROPERTIES", "PROVIDE", "RDEPEND", "repository",
		"RESTRICT", "SLOT", "DEFINED_PHASES", "REQUIRED_USE"])

	# In-memory metadata cache keyed by cpv, filled by aux_get().
	self._aux_cache = {}
	# Set of ebuild paths whose metadata could not be obtained.
	self._broken_ebuilds = set()
def _parse(paths, prepos, ignored_map, ignored_location_map, local_config, portdir):
	"""Parse files in paths to load config

	Reads repos.conf-style files/streams into ``parser`` and stores one
	RepoConfig per section in ``prepos`` (plus a 'DEFAULT' entry).
	Sections with inconsistent sync-* attributes are reported and
	skipped.  ignored_map, ignored_location_map and portdir are not
	used in this body; presumably kept for interface compatibility —
	TODO confirm against callers.
	"""
	parser = SafeConfigParser()

	# use read_file/readfp in order to control decoding of unicode
	try:
		# Python >=3.2
		read_file = parser.read_file
		source_kwarg = 'source'
	except AttributeError:
		read_file = parser.readfp
		source_kwarg = 'filename'

	# Expand string paths recursively; non-string entries (e.g.
	# io.StringIO) are passed through as-is.
	recursive_paths = []
	for p in paths:
		if isinstance(p, basestring):
			recursive_paths.extend(_recursive_file_list(p))
		else:
			recursive_paths.append(p)

	for p in recursive_paths:
		if isinstance(p, basestring):
			f = None
			try:
				f = io.open(_unicode_encode(p,
					encoding=_encodings['fs'], errors='strict'),
					mode='r', encoding=_encodings['repo.content'],
					errors='replace')
			except EnvironmentError:
				# Unreadable/missing file: skip silently.
				pass
			else:
				# The 'source' keyword argument is needed since otherwise
				# ConfigParser in Python <3.3.3 may throw a TypeError
				# because it assumes that f.name is a native string rather
				# than binary when constructing error messages.
				kwargs = {source_kwarg: p}
				read_file(f, **portage._native_kwargs(kwargs))
			finally:
				if f is not None:
					f.close()
		elif isinstance(p, io.StringIO):
			kwargs = {source_kwarg: "<io.StringIO>"}
			read_file(p, **portage._native_kwargs(kwargs))
		else:
			raise TypeError("Unsupported type %r of element %r of 'paths' argument" % (type(p), p))

	prepos['DEFAULT'] = RepoConfig("DEFAULT", parser.defaults(), local_config=local_config)

	for sname in parser.sections():
		optdict = {}
		for oname in parser.options(sname):
			optdict[oname] = parser.get(sname, oname)

		repo = RepoConfig(sname, optdict, local_config=local_config)

		# Validate the sync-* attribute combination; a repo that fails
		# any check is reported and dropped (not added to prepos).
		if repo.sync_type is not None and repo.sync_uri is None:
			writemsg_level("!!! %s\n" % _("Repository '%s' has sync-type attribute, but is missing sync-uri attribute") % sname,
				level=logging.ERROR, noiselevel=-1)
			continue

		if repo.sync_uri is not None and repo.sync_type is None:
			writemsg_level("!!! %s\n" % _("Repository '%s' has sync-uri attribute, but is missing sync-type attribute") % sname,
				level=logging.ERROR, noiselevel=-1)
			continue

		if repo.sync_type not in (None, "cvs", "git", "rsync"):
			writemsg_level("!!! %s\n" % _("Repository '%s' has sync-type attribute set to unsupported value: '%s'") % (sname, repo.sync_type),
				level=logging.ERROR, noiselevel=-1)
			continue

		if repo.sync_type == "cvs" and repo.sync_cvs_repo is None:
			writemsg_level("!!! %s\n" % _("Repository '%s' has sync-type=cvs, but is missing sync-cvs-repo attribute") % sname,
				level=logging.ERROR, noiselevel=-1)
			continue

		# For backward compatibility with locations set via PORTDIR and
		# PORTDIR_OVERLAY, delay validation of the location and repo.name
		# until after PORTDIR and PORTDIR_OVERLAY have been processed.
		prepos[sname] = repo
def _do_rsync(self, syncuri, timestamp, opts):
	"""Probe the server's timestamp and rsync the repository if needed.

	First fetches metadata/timestamp.chk from the server as a liveness
	probe and freshness check, then runs the actual rsync only when the
	server timestamp is unknown or newer than the local one.

	@param syncuri: rsync URI of the remote repository
	@param timestamp: local repository timestamp (0 when unknown)
	@param opts: option container; only membership of "--quiet" and
		"--debug" is checked here
	@rtype: tuple
	@return: (is_synced, exitcode) where is_synced is True when the
		rsync exit code indicates a usable (possibly partial) sync
	"""
	is_synced = False
	if timestamp != 0 and "--quiet" not in opts:
		print(">>> Checking server timestamp ...")

	rsynccommand = [self.bin_command] + self.rsync_opts + self.extra_rsync_opts

	if self.proto == 'ssh' and self.ssh_opts:
		rsynccommand.append("--rsh=ssh " + self.ssh_opts)

	if "--debug" in opts:
		print(rsynccommand)

	exitcode = os.EX_OK
	servertimestamp = 0

	# Even if there's no timestamp available locally, fetch the
	# timestamp anyway as an initial probe to verify that the server is
	# responsive.  This protects us from hanging indefinitely on a
	# connection attempt to an unresponsive server which rsync's
	# --timeout option does not prevent.

	# Temporary file for remote server timestamp comparison.
	# NOTE: If FEATURES=usersync is enabled then the tempfile
	# needs to be in a directory that's readable by the usersync
	# user. We assume that PORTAGE_TMPDIR will satisfy this
	# requirement, since that's not necessarily true for the
	# default directory used by the tempfile module.
	if self.usersync_uid is not None:
		tmpdir = self.settings['PORTAGE_TMPDIR']
	else:
		# use default dir from tempfile module
		tmpdir = None
	fd, tmpservertimestampfile = \
		tempfile.mkstemp(dir=tmpdir)
	os.close(fd)
	if self.usersync_uid is not None:
		portage.util.apply_permissions(tmpservertimestampfile,
			uid=self.usersync_uid)
	command = rsynccommand[:]
	command.append(syncuri.rstrip("/") + \
		"/metadata/timestamp.chk")
	command.append(tmpservertimestampfile)
	content = None
	pids = []
	try:
		# Timeout here in case the server is unresponsive.  The
		# --timeout rsync option doesn't apply to the initial
		# connection attempt.
		try:
			if self.rsync_initial_timeout:
				portage.exception.AlarmSignal.register(
					self.rsync_initial_timeout)

			pids.extend(portage.process.spawn(
				command, returnpid=True,
				**portage._native_kwargs(self.spawn_kwargs)))
			exitcode = os.waitpid(pids[0], 0)[1]
			if self.usersync_uid is not None:
				# Take the file back from the usersync user so we
				# can read and unlink it.
				portage.util.apply_permissions(tmpservertimestampfile,
					uid=os.getuid())
			content = portage.grabfile(tmpservertimestampfile)
		finally:
			if self.rsync_initial_timeout:
				portage.exception.AlarmSignal.unregister()
			try:
				os.unlink(tmpservertimestampfile)
			except OSError:
				pass
	except portage.exception.AlarmSignal:
		# timed out
		print('timed out')
		# With waitpid and WNOHANG, only check the
		# first element of the tuple since the second
		# element may vary (bug #337465).
		if pids and os.waitpid(pids[0], os.WNOHANG)[0] == 0:
			os.kill(pids[0], signal.SIGTERM)
			os.waitpid(pids[0], 0)
		# This is the same code rsync uses for timeout.
		exitcode = 30
	else:
		if exitcode != os.EX_OK:
			# Normalize the raw waitpid status: promote a signal
			# death into the high byte, otherwise extract the
			# process exit status from the high byte.
			if exitcode & 0xff:
				exitcode = (exitcode & 0xff) << 8
			else:
				exitcode = exitcode >> 8

	if content:
		try:
			servertimestamp = time.mktime(time.strptime(
				content[0], TIMESTAMP_FORMAT))
		except (OverflowError, ValueError):
			# Malformed/overflowing timestamp: treat as unknown (0).
			pass
	del command, pids, content

	if exitcode == os.EX_OK:
		if (servertimestamp != 0) and (servertimestamp == timestamp):
			self.logger(self.xterm_titles,
				">>> Cancelling sync -- Already current.")
			print()
			print(">>>")
			print(">>> Timestamps on the server and in the local repository are the same.")
			print(">>> Cancelling all further sync action. You are already up to date.")
			print(">>>")
			print(">>> In order to force sync, remove '%s'." % self.servertimestampfile)
			print(">>>")
			print()
			return is_synced, exitcode
		elif (servertimestamp != 0) and (servertimestamp < timestamp):
			self.logger(self.xterm_titles,
				">>> Server out of date: %s" % syncuri)
			print()
			print(">>>")
			print(">>> SERVER OUT OF DATE: %s" % syncuri)
			print(">>>")
			print(">>> In order to force sync, remove '%s'." % self.servertimestampfile)
			print(">>>")
			print()
			exitcode = SERVER_OUT_OF_DATE
		elif (servertimestamp == 0) or (servertimestamp > timestamp):
			# actual sync
			command = rsynccommand + [syncuri + "/", self.repo.location]
			exitcode = None
			try:
				exitcode = portage.process.spawn(command,
					**portage._native_kwargs(self.spawn_kwargs))
			finally:
				if exitcode is None:
					# interrupted
					exitcode = 128 + signal.SIGINT

			# 0	Success
			# 1	Syntax or usage error
			# 2	Protocol incompatibility
			# 5	Error starting client-server protocol
			# 35	Timeout waiting for daemon connection
			if exitcode not in (0, 1, 2, 5, 35):
				# If the exit code is not among those listed above,
				# then we may have a partial/inconsistent sync
				# state, so our previously read timestamp as well
				# as the corresponding file can no longer be
				# trusted.
				timestamp = 0
				try:
					os.unlink(self.servertimestampfile)
				except OSError:
					pass

	# Exit codes that still leave the tree in a usable state: 0 is
	# success; 1, 3, 4, 11, 14, 20, 21 are rsync's partial-transfer /
	# vanished-source-file classes, reported after copying what it
	# could.  (The previous code repeated the same subset in an
	# unreachable elif branch; consolidated into one test.)
	# Code 2 indicates protocol incompatibility, which is expected
	# for servers with protocol < 29 that don't support
	# --prune-empty-directories. Retry for a server that supports
	# at least rsync protocol version 29 (>=rsync-2.6.4).
	if exitcode in (0, 1, 3, 4, 11, 14, 20, 21):
		is_synced = True

	return is_synced, exitcode
def _parse(paths, prepos, ignored_map, ignored_location_map, local_config, portdir):
	"""Parse files in paths to load config

	Loads repos.conf-style data from files/streams and stores one
	RepoConfig per section in ``prepos`` (plus a 'DEFAULT' entry built
	from the parser defaults).  Sections whose sync-* attributes are
	inconsistent are reported via writemsg_level and skipped.
	ignored_map, ignored_location_map and portdir are unused in this
	body; presumably retained for interface compatibility — TODO
	confirm against callers.
	"""
	parser = SafeConfigParser()

	# use read_file/readfp in order to control decoding of unicode
	try:
		# Python >=3.2
		read_file = parser.read_file
		source_kwarg = 'source'
	except AttributeError:
		read_file = parser.readfp
		source_kwarg = 'filename'

	# Expand string paths recursively; pass non-string entries through.
	recursive_paths = []
	for p in paths:
		if isinstance(p, basestring):
			recursive_paths.extend(_recursive_file_list(p))
		else:
			recursive_paths.append(p)

	for p in recursive_paths:
		if isinstance(p, basestring):
			f = None
			try:
				f = io.open(_unicode_encode(p,
					encoding=_encodings['fs'], errors='strict'),
					mode='r', encoding=_encodings['repo.content'],
					errors='replace')
			except EnvironmentError:
				# Unreadable/missing file: skip silently.
				pass
			else:
				# The 'source' keyword argument is needed since otherwise
				# ConfigParser in Python <3.3.3 may throw a TypeError
				# because it assumes that f.name is a native string rather
				# than binary when constructing error messages.
				kwargs = {source_kwarg: p}
				read_file(f, **portage._native_kwargs(kwargs))
			finally:
				if f is not None:
					f.close()
		elif isinstance(p, io.StringIO):
			kwargs = {source_kwarg: "<io.StringIO>"}
			read_file(p, **portage._native_kwargs(kwargs))
		else:
			raise TypeError(
				"Unsupported type %r of element %r of 'paths' argument"
				% (type(p), p))

	prepos['DEFAULT'] = RepoConfig("DEFAULT",
		parser.defaults(), local_config=local_config)

	for sname in parser.sections():
		optdict = {}
		for oname in parser.options(sname):
			optdict[oname] = parser.get(sname, oname)

		repo = RepoConfig(sname, optdict, local_config=local_config)

		# Validate sync-* attribute combinations; a repo failing any
		# check is reported and dropped (never added to prepos).
		if repo.sync_type is not None and repo.sync_uri is None:
			writemsg_level("!!! %s\n" % _(
				"Repository '%s' has sync-type attribute, but is missing sync-uri attribute"
				) % sname,
				level=logging.ERROR, noiselevel=-1)
			continue

		if repo.sync_uri is not None and repo.sync_type is None:
			writemsg_level("!!! %s\n" % _(
				"Repository '%s' has sync-uri attribute, but is missing sync-type attribute"
				) % sname,
				level=logging.ERROR, noiselevel=-1)
			continue

		if repo.sync_type not in (None, "cvs", "git", "rsync"):
			writemsg_level("!!! %s\n" % _(
				"Repository '%s' has sync-type attribute set to unsupported value: '%s'"
				) % (sname, repo.sync_type),
				level=logging.ERROR, noiselevel=-1)
			continue

		if repo.sync_type == "cvs" and repo.sync_cvs_repo is None:
			writemsg_level("!!! %s\n" % _(
				"Repository '%s' has sync-type=cvs, but is missing sync-cvs-repo attribute"
				) % sname,
				level=logging.ERROR, noiselevel=-1)
			continue

		# For backward compatibility with locations set via PORTDIR and
		# PORTDIR_OVERLAY, delay validation of the location and repo.name
		# until after PORTDIR and PORTDIR_OVERLAY have been processed.
		prepos[sname] = repo
def verbose_size(self, pkg, repoadd_set, pkg_info):
	"""Determines the size of the downloads required

	@param pkg: _emerge.Package.Package instance
	@param repoadd_set: set of repos to add
	@param pkg_info: dictionary
	Modifies class globals: self.myfetchlist, self.counters.totalsize,
		self.verboseadd, repoadd_set.
	"""
	mysize = 0
	if pkg.type_name in ("binary", "ebuild") and pkg_info.merge:
		db = pkg.root_config.trees[
			pkg.root_config.pkg_tree_map[pkg.type_name]].dbapi
		kwargs = {}
		if pkg.type_name == "ebuild":
			# USE flags and repo affect which SRC_URI files apply.
			kwargs["useflags"] = pkg_info.use
			kwargs["myrepo"] = pkg.repo
		myfilesdict = None
		try:
			myfilesdict = db.getfetchsizes(pkg.cpv,
				**portage._native_kwargs(kwargs))
		except InvalidDependString as e:
			# FIXME: validate SRC_URI earlier
			depstr, = db.aux_get(pkg.cpv,
				["SRC_URI"], myrepo=pkg.repo)
			show_invalid_depstring_notice(
				pkg, depstr, _unicode(e))
			# Re-raise after showing the notice; caller handles it.
			raise
		except SignatureException:
			# missing/invalid binary package SIZE signature
			pass
		if myfilesdict is None:
			# No usable size data; display a placeholder string.
			myfilesdict = "[empty/missing/bad digest]"
		else:
			# Count each distinct fetch file only once across the
			# whole merge list (myfetchlist persists between calls).
			for myfetchfile in myfilesdict:
				if myfetchfile not in self.myfetchlist:
					mysize += myfilesdict[myfetchfile]
					self.myfetchlist.add(myfetchfile)
			if pkg_info.ordered:
				self.counters.totalsize += mysize
		self.verboseadd += _format_size(mysize)

	if self.quiet_repo_display:
		# overlay verbose
		# assign index for a previous version in the same slot
		if pkg_info.previous_pkg is not None:
			repo_name_prev = pkg_info.previous_pkg.repo
		else:
			repo_name_prev = None

		# now use the data to generate output
		if pkg.installed or pkg_info.previous_pkg is None:
			self.repoadd = self.conf.repo_display.repoStr(
				pkg_info.repo_path_real)
		else:
			repo_path_prev = None
			if repo_name_prev:
				repo_path_prev = self.portdb.getRepositoryPath(
					repo_name_prev)
			# Show "old=>new" only when the repo actually changed.
			if repo_path_prev == pkg_info.repo_path_real:
				self.repoadd = self.conf.repo_display.repoStr(
					pkg_info.repo_path_real)
			else:
				self.repoadd = "%s=>%s" % (
					self.conf.repo_display.repoStr(repo_path_prev),
					self.conf.repo_display.repoStr(
						pkg_info.repo_path_real))
		if self.repoadd:
			repoadd_set.add(self.repoadd)
def _parse(paths, prepos, ignored_map, ignored_location_map, local_config, portdir, default_opts):
	"""Parse files in paths to load config"""
	parser = SafeConfigParser(defaults=default_opts)

	# ConfigParser.read_file() exists on Python >=3.2 and takes a
	# 'source' keyword; older interpreters only provide readfp(),
	# whose equivalent keyword is 'filename'.  Detect once up front.
	try:
		read_file = parser.read_file
		source_kwarg = 'source'
	except AttributeError:
		read_file = parser.readfp
		source_kwarg = 'filename'

	# Expand string entries recursively into individual files; keep
	# non-string entries (e.g. io.StringIO) as-is.
	recursive_paths = []
	for entry in paths:
		if not isinstance(entry, basestring):
			recursive_paths.append(entry)
		else:
			recursive_paths.extend(_recursive_file_list(entry))

	for entry in recursive_paths:
		if isinstance(entry, io.StringIO):
			read_kwargs = {source_kwarg: "<io.StringIO>"}
			read_file(entry, **portage._native_kwargs(read_kwargs))
		elif isinstance(entry, basestring):
			cfg_file = None
			try:
				cfg_file = io.open(_unicode_encode(entry,
					encoding=_encodings['fs'], errors='strict'),
					mode='r', encoding=_encodings['repo.content'],
					errors='replace')
			except EnvironmentError:
				# Missing or unreadable file: skip it silently.
				pass
			else:
				# Supplying the source label explicitly works around a
				# TypeError in ConfigParser on Python <3.3.3, which
				# assumes f.name is a native string when it builds
				# error messages.
				read_kwargs = {source_kwarg: entry}
				read_file(cfg_file, **portage._native_kwargs(read_kwargs))
			finally:
				if cfg_file is not None:
					cfg_file.close()
		else:
			raise TypeError("Unsupported type %r of element %r of"
				" 'paths' argument" % (type(entry), entry))

	prepos['DEFAULT'] = RepoConfig("DEFAULT",
		parser.defaults(), local_config=local_config)

	for sname in parser.sections():
		optdict = {}
		for oname in parser.options(sname):
			optdict[oname] = parser.get(sname, oname)

		repo = RepoConfig(sname, optdict, local_config=local_config)

		# Forward any sync-module-specific options to the repo object.
		for opt_name in portage.sync.module_specific_options(repo):
			repo.set_module_specific_opt(opt_name,
				parser.get(sname, opt_name))

		# Perform repos.conf sync variable validation
		portage.sync.validate_config(repo, logging)

		# For backward compatibility with locations set via PORTDIR and
		# PORTDIR_OVERLAY, delay validation of the location and repo.name
		# until after PORTDIR and PORTDIR_OVERLAY have been processed.
		prepos[sname] = repo