def manifest2MiscfileFilter(filename):
	"""Return True if *filename* should be hashed as a MISC Manifest entry.

	VCS metadata ("CVS", ".svn"), the "files" directory, the Manifest
	itself, and anything recognized as an ebuild are excluded.
	"""
	filename = filename.strip(os.sep)
	if portage._glep_55_enabled:
		pf, eapi = portage._split_ebuild_name_glep55(filename)
		if pf is not None:
			# A GLEP 55 style ebuild name is never a MISC file.
			return False
	# Guard-clause form of the original combined boolean expression.
	if filename in ("CVS", ".svn", "files", "Manifest"):
		return False
	return not filename.endswith(".ebuild")
def cp_list(self, mycp, use_cache=1, mytree=None):
	"""Return a sorted list of all cpvs available for category/package
	*mycp*, scanning either *mytree* or all configured porttrees.

	Results are cached in self.xcache when this dbapi is frozen and no
	specific tree was requested.  Invalid ebuild names/versions are
	reported via writemsg() and skipped.
	"""
	if self.frozen and mytree is None:
		cachelist = self.xcache["cp-list"].get(mycp)
		if cachelist is not None:
			# Try to propagate this to the match-all cache here for
			# repoman since he uses separate match-all caches for each
			# profile (due to old-style virtuals).  Do not propagate
			# old-style virtuals since cp_list() doesn't expand them.
			if not (not cachelist and mycp.startswith("virtual/")):
				self.xcache["match-all"][mycp] = cachelist
			# Return a copy so callers can't mutate the cached list.
			return cachelist[:]
	mysplit = mycp.split("/")
	invalid_category = mysplit[0] not in self._categories
	# GLEP 55 support: ebuild names may carry the EAPI in their suffix.
	glep55 = 'parse-eapi-glep-55' in self.doebuild_settings.features
	# Keys of d collect unique "cat/pf" entries (dict used as ordered set).
	d={}
	if mytree:
		mytrees = [mytree]
	else:
		mytrees = self.porttrees
	for oroot in mytrees:
		try:
			file_list = os.listdir(os.path.join(oroot, mycp))
		except OSError:
			# Package directory missing in this tree; try the next one.
			continue
		for x in file_list:
			pf = None
			if glep55:
				pf, eapi = portage._split_ebuild_name_glep55(x)
			elif x[-7:] == '.ebuild':
				pf = x[:-7]
			if pf is not None:
				ps = pkgsplit(pf)
				if not ps:
					writemsg(_("\nInvalid ebuild name: %s\n") % \
						os.path.join(oroot, mycp, x), noiselevel=-1)
					continue
				if ps[0] != mysplit[1]:
					# Package name in the file disagrees with the directory.
					writemsg(_("\nInvalid ebuild name: %s\n") % \
						os.path.join(oroot, mycp, x), noiselevel=-1)
					continue
				ver_match = ver_regexp.match("-".join(ps[1:]))
				if ver_match is None or not ver_match.groups():
					writemsg(_("\nInvalid ebuild version: %s\n") % \
						os.path.join(oroot, mycp, x), noiselevel=-1)
					continue
				d[mysplit[0]+"/"+pf] = None
	if invalid_category and d:
		# Ebuilds exist but the category is not registered; treat as empty.
		writemsg(_("\n!!! '%s' has a category that is not listed in " \
			"%setc/portage/categories\n") % \
			(mycp, self.settings["PORTAGE_CONFIGROOT"]), noiselevel=-1)
		mylist = []
	else:
		mylist = list(d)
	# Always sort in ascending order here since it's handy
	# and the result can be easily cached and reused.
	self._cpv_sort_ascending(mylist)
	if self.frozen and mytree is None:
		cachelist = mylist[:]
		self.xcache["cp-list"][mycp] = cachelist
		# Do not propagate old-style virtuals since
		# cp_list() doesn't expand them.
		if not (not cachelist and mycp.startswith("virtual/")):
			self.xcache["match-all"][mycp] = cachelist
	return mylist
def aux_get(self, mycpv, mylist, mytree=None):
	"stub code for returning auxilliary db information, such as SLOT, DEPEND, etc."
	'input: "sys-apps/foo-1.0",["SLOT","DEPEND","HOMEPAGE"]'
	'return: ["0",">=sys-libs/bar-1.0","http://www.foo.com"] or raise KeyError if error'
	# Only populate the in-memory aux cache when no specific tree was
	# requested (results would otherwise be tree-dependent).
	cache_me = False
	if not mytree:
		cache_me = True
	if not mytree and not self._known_keys.intersection(
		mylist).difference(self._aux_cache_keys):
		# Fast path: everything requested is already in the aux cache.
		aux_cache = self._aux_cache.get(mycpv)
		if aux_cache is not None:
			return [aux_cache.get(x, "") for x in mylist]
		cache_me = True
	global auxdbkeys, auxdbkeylen
	try:
		cat, pkg = mycpv.split("/", 1)
	except ValueError:
		# Missing slash. Can't find ebuild so raise KeyError.
		raise KeyError(mycpv)

	myebuild, mylocation = self.findname2(mycpv, mytree)

	if not myebuild:
		writemsg("!!! aux_get(): %s\n" % \
			_("ebuild not found for '%s'") % mycpv, noiselevel=1)
		raise KeyError(mycpv)

	mydata, st, emtime = self._pull_valid_cache(mycpv, myebuild, mylocation)
	# doregen: no valid cached metadata, so run the ebuild's depend phase.
	doregen = mydata is None

	if doregen:
		if myebuild in self._broken_ebuilds:
			# Previously failed; don't retry within this session.
			raise KeyError(mycpv)
		if not self._have_root_eclass_dir:
			raise KeyError(mycpv)

		self.doebuild_settings.setcpv(mycpv)
		mydata = {}
		eapi = None

		# Try to discover the EAPI without sourcing the ebuild:
		# first from a GLEP 55 file name, then from the ebuild head.
		if 'parse-eapi-glep-55' in self.doebuild_settings.features:
			pf, eapi = portage._split_ebuild_name_glep55(
				os.path.basename(myebuild))
		if eapi is None and \
			'parse-eapi-ebuild-head' in self.doebuild_settings.features:
			eapi = portage._parse_eapi_ebuild_head(codecs.open(
				_unicode_encode(myebuild,
				encoding=_encodings['fs'], errors='strict'),
				mode='r', encoding=_encodings['repo.content'],
				errors='replace'))

		if eapi is not None:
			self.doebuild_settings.configdict['pkg']['EAPI'] = eapi

		if eapi is not None and not portage.eapi_is_supported(eapi):
			# Unsupported EAPI: record only the EAPI, no other metadata.
			mydata['EAPI'] = eapi
		else:
			myret = doebuild(myebuild, "depend",
				self.doebuild_settings["ROOT"], self.doebuild_settings,
				dbkey=mydata, tree="porttree", mydbapi=self)
			if myret != os.EX_OK:
				self._broken_ebuilds.add(myebuild)
				raise KeyError(mycpv)

		self._metadata_callback(
			mycpv, myebuild, mylocation, mydata, emtime)

		if mydata.get("INHERITED", False):
			# Resolve inherited eclasses to their checksummed data.
			mydata["_eclasses_"] = self._repo_info[mylocation
				].eclass_db.get_eclass_data(mydata["INHERITED"].split())
		else:
			mydata["_eclasses_"] = {}

	# do we have a origin repository name for the current package
	mydata["repository"] = self._repository_map.get(mylocation, "")

	mydata["INHERITED"] = ' '.join(mydata.get("_eclasses_", []))
	mydata["_mtime_"] = st[stat.ST_MTIME]

	eapi = mydata.get("EAPI")
	if not eapi:
		# Missing/empty EAPI defaults to "0".
		eapi = "0"
		mydata["EAPI"] = eapi
	if not eapi_is_supported(eapi):
		# Blank out all metadata and mark the EAPI with a "-" prefix so
		# the entry is recognizably unusable but still cacheable.
		for k in set(mydata).difference(("_mtime_", "_eclasses_")):
			mydata[k] = ""
		mydata["EAPI"] = "-" + eapi.lstrip("-")

	#finally, we look at our internal cache entry and return the requested data.
	returnme = [mydata.get(x, "") for x in mylist]

	if cache_me:
		aux_cache = {}
		for x in self._aux_cache_keys:
			aux_cache[x] = mydata.get(x, "")
		self._aux_cache[mycpv] = aux_cache

	return returnme
def digestcheck(myfiles, mysettings, strict=0, justmanifest=0):
	"""
	Verifies checksums. Assumes all files have been downloaded.
	@rtype: int
	@returns: 1 on success and 0 on failure
	"""
	# NOTE(review): the justmanifest parameter is accepted but never used
	# in this body — confirm against callers before removing.
	if mysettings.get("EBUILD_SKIP_MANIFEST") == "1":
		return 1
	pkgdir = mysettings["O"]
	manifest_path = os.path.join(pkgdir, "Manifest")
	if not os.path.exists(manifest_path):
		writemsg(_("!!! Manifest file not found: '%s'\n") % manifest_path,
			noiselevel=-1)
		# A missing Manifest is fatal only in strict mode.
		if strict:
			return 0
		else:
			return 1
	mf = Manifest(pkgdir, mysettings["DISTDIR"])
	manifest_empty = True
	for d in mf.fhashdict.values():
		if d:
			manifest_empty = False
			break
	if manifest_empty:
		writemsg(_("!!! Manifest is empty: '%s'\n") % manifest_path,
			noiselevel=-1)
		if strict:
			return 0
		else:
			return 1
	eout = EOutput()
	eout.quiet = mysettings.get("PORTAGE_QUIET", None) == "1"
	try:
		# Full EBUILD/AUX/MISC hash verification only in strict mode and
		# not during a parallel fetch-only run.
		if strict and "PORTAGE_PARALLEL_FETCHONLY" not in mysettings:
			eout.ebegin(_("checking ebuild checksums ;-)"))
			mf.checkTypeHashes("EBUILD")
			eout.eend(0)
			eout.ebegin(_("checking auxfile checksums ;-)"))
			mf.checkTypeHashes("AUX")
			eout.eend(0)
			eout.ebegin(_("checking miscfile checksums ;-)"))
			mf.checkTypeHashes("MISC", ignoreMissingFiles=True)
			eout.eend(0)
		for f in myfiles:
			eout.ebegin(_("checking %s ;-)") % f)
			ftype = mf.findFile(f)
			if ftype is None:
				# File not present in the Manifest at all.
				raise KeyError(f)
			mf.checkFileHashes(ftype, f)
			eout.eend(0)
	except KeyError as e:
		eout.eend(1)
		writemsg(_("\n!!! Missing digest for %s\n") % str(e), noiselevel=-1)
		return 0
	except FileNotFound as e:
		eout.eend(1)
		writemsg(_("\n!!! A file listed in the Manifest could not be found: %s\n") % str(e),
			noiselevel=-1)
		return 0
	except DigestException as e:
		eout.eend(1)
		writemsg(_("\n!!! Digest verification failed:\n"), noiselevel=-1)
		writemsg("!!! %s\n" % e.value[0], noiselevel=-1)
		writemsg(_("!!! Reason: %s\n") % e.value[1], noiselevel=-1)
		writemsg(_("!!! Got: %s\n") % e.value[2], noiselevel=-1)
		writemsg(_("!!! Expected: %s\n") % e.value[3], noiselevel=-1)
		return 0
	# Make sure that all of the ebuilds are actually listed in the Manifest.
	glep55 = 'parse-eapi-glep-55' in mysettings.features
	for f in os.listdir(pkgdir):
		pf = None
		if glep55:
			pf, eapi = _split_ebuild_name_glep55(f)
		elif f[-7:] == '.ebuild':
			pf = f[:-7]
		if pf is not None and not mf.hasFile("EBUILD", f):
			writemsg(_("!!! A file is not listed in the Manifest: '%s'\n") % \
				os.path.join(pkgdir, f), noiselevel=-1)
			if strict:
				return 0
	""" epatch will just grab all the patches out of a directory, so we have to
	make sure there aren't any foreign files that it might grab."""
	filesdir = os.path.join(pkgdir, "files")
	for parent, dirs, files in os.walk(filesdir):
		try:
			parent = _unicode_decode(parent,
				encoding=_encodings['fs'], errors='strict')
		except UnicodeDecodeError:
			# Undecodable path: report with replacement characters and
			# skip this subtree (fatal only in strict mode).
			parent = _unicode_decode(parent,
				encoding=_encodings['fs'], errors='replace')
			writemsg(_("!!! Path contains invalid "
				"character(s) for encoding '%s': '%s'") \
				% (_encodings['fs'], parent), noiselevel=-1)
			if strict:
				return 0
			continue
		for d in dirs:
			# Keep the original (possibly bytes) name for in-place removal
			# from dirs, which prunes os.walk's descent.
			d_bytes = d
			try:
				d = _unicode_decode(d,
					encoding=_encodings['fs'], errors='strict')
			except UnicodeDecodeError:
				d = _unicode_decode(d,
					encoding=_encodings['fs'], errors='replace')
				writemsg(_("!!! Path contains invalid "
					"character(s) for encoding '%s': '%s'") \
					% (_encodings['fs'], os.path.join(parent, d)),
					noiselevel=-1)
				if strict:
					return 0
				dirs.remove(d_bytes)
				continue
			if d.startswith(".") or d == "CVS":
				# Prune hidden and VCS directories from the walk.
				dirs.remove(d_bytes)
		for f in files:
			try:
				f = _unicode_decode(f,
					encoding=_encodings['fs'], errors='strict')
			except UnicodeDecodeError:
				f = _unicode_decode(f,
					encoding=_encodings['fs'], errors='replace')
				if f.startswith("."):
					continue
				# Path relative to the files/ directory, for reporting.
				f = os.path.join(parent, f)[len(filesdir) + 1:]
				writemsg(_("!!! File name contains invalid "
					"character(s) for encoding '%s': '%s'") \
					% (_encodings['fs'], f), noiselevel=-1)
				if strict:
					return 0
				continue
			if f.startswith("."):
				continue
			f = os.path.join(parent, f)[len(filesdir) + 1:]
			file_type = mf.findFile(f)
			if file_type != "AUX" and not f.startswith("digest-"):
				writemsg(_("!!! A file is not listed in the Manifest: '%s'\n") % \
					os.path.join(filesdir, f), noiselevel=-1)
				if strict:
					return 0
	return 1
def create(self, checkExisting=False, assumeDistHashesSometimes=False,
	assumeDistHashesAlways=False, requiredDistfiles=[]):
	""" Recreate this Manifest from scratch.  This will not use any
	existing checksums unless assumeDistHashesSometimes or
	assumeDistHashesAlways is true (assumeDistHashesSometimes will only
	cause DIST checksums to be reused if the file doesn't exist in
	DISTDIR).  The requiredDistfiles parameter specifies a list of
	distfiles to raise a FileNotFound exception for (if no file or existing
	checksums are available), and defaults to all distfiles when not
	specified."""
	# NOTE(review): the mutable default requiredDistfiles=[] is safe here
	# because it is only rebound, never mutated; [] deliberately means
	# "all distfiles are required" (see below), so it must not be
	# replaced with None.
	if checkExisting:
		self.checkAllHashes()
	if assumeDistHashesSometimes or assumeDistHashesAlways:
		# Keep a reference to the old DIST hashes before reinitializing.
		distfilehashes = self.fhashdict["DIST"]
	else:
		distfilehashes = {}
	# Reset all internal state to an empty Manifest.
	self.__init__(self.pkgdir, self.distdir,
		fetchlist_dict=self.fetchlist_dict, from_scratch=True,
		manifest1_compat=False)
	cpvlist = []
	pn = os.path.basename(self.pkgdir.rstrip(os.path.sep))
	cat = self._pkgdir_category()

	pkgdir = self.pkgdir

	# Grab only the top-level entries of pkgdir (break after one step).
	for pkgdir, pkgdir_dirs, pkgdir_files in os.walk(pkgdir):
		break
	for f in pkgdir_files:
		try:
			f = _unicode_decode(f,
				encoding=_encodings['fs'], errors='strict')
		except UnicodeDecodeError:
			# Skip undecodable file names.
			continue
		if f[:1] == ".":
			continue
		pf = None
		if portage._glep_55_enabled:
			pf, eapi = portage._split_ebuild_name_glep55(f)
		elif f[-7:] == '.ebuild':
			pf = f[:-7]
		if pf is not None:
			mytype = "EBUILD"
			ps = portage.versions._pkgsplit(pf)
			cpv = "%s/%s" % (cat, pf)
			if not ps:
				raise PortagePackageException(
					_("Invalid package name: '%s'") % cpv)
			if ps[0] != pn:
				raise PortagePackageException(
					_("Package name does not "
					"match directory name: '%s'") % cpv)
			cpvlist.append(cpv)
		elif manifest2MiscfileFilter(f):
			mytype = "MISC"
		else:
			continue
		# NOTE(review): self.pkgdir + f assumes pkgdir carries a trailing
		# separator — confirm against Manifest.__init__.
		self.fhashdict[mytype][f] = perform_multiple_checksums(
			self.pkgdir + f, self.hashes)
	recursive_files = []

	pkgdir = self.pkgdir
	# Length of the "files/" prefix to strip from walked paths.
	cut_len = len(os.path.join(pkgdir, "files") + os.sep)
	for parentdir, dirs, files in os.walk(os.path.join(pkgdir, "files")):
		for f in files:
			try:
				f = _unicode_decode(
					f, encoding=_encodings['fs'], errors='strict')
			except UnicodeDecodeError:
				continue
			full_path = os.path.join(parentdir, f)
			recursive_files.append(full_path[cut_len:])
	for f in recursive_files:
		if not manifest2AuxfileFilter(f):
			continue
		self.fhashdict["AUX"][f] = perform_multiple_checksums(
			os.path.join(self.pkgdir, "files", f.lstrip(os.sep)), self.hashes)
	# Collect all distfiles referenced by the ebuilds found above.
	distlist = set()
	for cpv in cpvlist:
		distlist.update(self._getCpvDistfiles(cpv))
	if requiredDistfiles is None:
		# This allows us to force removal of stale digests for the
		# ebuild --force digest option (no distfiles are required).
		requiredDistfiles = set()
	elif len(requiredDistfiles) == 0:
		# repoman passes in an empty list, which implies that all distfiles
		# are required.
		requiredDistfiles = distlist.copy()
	required_hash_types = set()
	required_hash_types.add("size")
	required_hash_types.add(portage.const.MANIFEST2_REQUIRED_HASH)
	for f in distlist:
		fname = os.path.join(self.distdir, f)
		mystat = None
		try:
			mystat = os.stat(fname)
		except OSError:
			pass
		# Reuse old hashes only when they cover all required hash types
		# and the reuse policy (sometimes/always + file presence/size)
		# permits it; otherwise recompute from the file on disk.
		if f in distfilehashes and \
			not required_hash_types.difference(distfilehashes[f]) and \
			((assumeDistHashesSometimes and mystat is None) or \
			(assumeDistHashesAlways and mystat is None) or \
			(assumeDistHashesAlways and mystat is not None and \
			len(distfilehashes[f]) == len(self.hashes) and \
			distfilehashes[f]["size"] == mystat.st_size)):
			self.fhashdict["DIST"][f] = distfilehashes[f]
		else:
			try:
				self.fhashdict["DIST"][f] = perform_multiple_checksums(
					fname, self.hashes)
			except FileNotFound:
				# Only fatal when the distfile is actually required.
				if f in requiredDistfiles:
					raise
def create(self, checkExisting=False, assumeDistHashesSometimes=False,
	assumeDistHashesAlways=False, requiredDistfiles=[]):
	""" Recreate this Manifest from scratch.  This will not use any
	existing checksums unless assumeDistHashesSometimes or
	assumeDistHashesAlways is true (assumeDistHashesSometimes will only
	cause DIST checksums to be reused if the file doesn't exist in
	DISTDIR).  The requiredDistfiles parameter specifies a list of
	distfiles to raise a FileNotFound exception for (if no file or existing
	checksums are available), and defaults to all distfiles when not
	specified."""
	# NOTE(review): the mutable default requiredDistfiles=[] is safe here
	# because it is only rebound, never mutated; [] deliberately means
	# "all distfiles are required" (see below), so it must not be
	# replaced with None.
	if checkExisting:
		self.checkAllHashes()
	if assumeDistHashesSometimes or assumeDistHashesAlways:
		# Keep a reference to the old DIST hashes before reinitializing.
		distfilehashes = self.fhashdict["DIST"]
	else:
		distfilehashes = {}
	# Reset all internal state to an empty Manifest.
	self.__init__(self.pkgdir, self.distdir,
		fetchlist_dict=self.fetchlist_dict, from_scratch=True,
		manifest1_compat=False)
	cpvlist = []
	pn = os.path.basename(self.pkgdir.rstrip(os.path.sep))
	cat = self._pkgdir_category()

	pkgdir = self.pkgdir

	# Grab only the top-level entries of pkgdir (break after one step).
	for pkgdir, pkgdir_dirs, pkgdir_files in os.walk(pkgdir):
		break
	for f in pkgdir_files:
		try:
			f = _unicode_decode(f,
				encoding=_encodings['fs'], errors='strict')
		except UnicodeDecodeError:
			# Skip undecodable file names.
			continue
		if f[:1] == ".":
			continue
		pf = None
		if portage._glep_55_enabled:
			pf, eapi = portage._split_ebuild_name_glep55(f)
		elif f[-7:] == '.ebuild':
			pf = f[:-7]
		if pf is not None:
			mytype = "EBUILD"
			ps = portage.versions._pkgsplit(pf)
			cpv = "%s/%s" % (cat, pf)
			if not ps:
				raise PortagePackageException(
					_("Invalid package name: '%s'") % cpv)
			if ps[0] != pn:
				raise PortagePackageException(
					_("Package name does not "
					"match directory name: '%s'") % cpv)
			cpvlist.append(cpv)
		elif manifest2MiscfileFilter(f):
			mytype = "MISC"
		else:
			continue
		# NOTE(review): self.pkgdir+f assumes pkgdir carries a trailing
		# separator — confirm against Manifest.__init__.
		self.fhashdict[mytype][f] = perform_multiple_checksums(self.pkgdir+f, self.hashes)
	recursive_files = []

	pkgdir = self.pkgdir
	# Length of the "files/" prefix to strip from walked paths.
	cut_len = len(os.path.join(pkgdir, "files") + os.sep)
	for parentdir, dirs, files in os.walk(os.path.join(pkgdir, "files")):
		for f in files:
			try:
				f = _unicode_decode(f,
					encoding=_encodings['fs'], errors='strict')
			except UnicodeDecodeError:
				continue
			full_path = os.path.join(parentdir, f)
			recursive_files.append(full_path[cut_len:])
	for f in recursive_files:
		if not manifest2AuxfileFilter(f):
			continue
		self.fhashdict["AUX"][f] = perform_multiple_checksums(
			os.path.join(self.pkgdir, "files", f.lstrip(os.sep)), self.hashes)
	# Collect all distfiles referenced by the ebuilds found above.
	distlist = set()
	for cpv in cpvlist:
		distlist.update(self._getCpvDistfiles(cpv))
	if requiredDistfiles is None:
		# This allows us to force removal of stale digests for the
		# ebuild --force digest option (no distfiles are required).
		requiredDistfiles = set()
	elif len(requiredDistfiles) == 0:
		# repoman passes in an empty list, which implies that all distfiles
		# are required.
		requiredDistfiles = distlist.copy()
	required_hash_types = set()
	required_hash_types.add("size")
	required_hash_types.add(portage.const.MANIFEST2_REQUIRED_HASH)
	for f in distlist:
		fname = os.path.join(self.distdir, f)
		mystat = None
		try:
			mystat = os.stat(fname)
		except OSError:
			pass
		# Reuse old hashes only when they cover all required hash types
		# and the reuse policy (sometimes/always + file presence/size)
		# permits it; otherwise recompute from the file on disk.
		if f in distfilehashes and \
			not required_hash_types.difference(distfilehashes[f]) and \
			((assumeDistHashesSometimes and mystat is None) or \
			(assumeDistHashesAlways and mystat is None) or \
			(assumeDistHashesAlways and mystat is not None and \
			len(distfilehashes[f]) == len(self.hashes) and \
			distfilehashes[f]["size"] == mystat.st_size)):
			self.fhashdict["DIST"][f] = distfilehashes[f]
		else:
			try:
				self.fhashdict["DIST"][f] = perform_multiple_checksums(fname, self.hashes)
			except FileNotFound:
				# Only fatal when the distfile is actually required.
				if f in requiredDistfiles:
					raise
def _start(self):
	"""Begin the asynchronous metadata (depend) phase for self.cpv.

	Fast path: if the EAPI can be determined without sourcing the ebuild
	(GLEP 55 name or ebuild head) and it is unsupported, report metadata
	via self.metadata_callback and finish immediately.  Otherwise spawn
	`doebuild(..., "depend")` with a pipe for metadata output and register
	the read end with the scheduler.
	"""
	settings = self.settings
	settings.setcpv(self.cpv)
	ebuild_path = self.ebuild_path

	eapi = None
	if 'parse-eapi-glep-55' in settings.features:
		pf, eapi = portage._split_ebuild_name_glep55(
			os.path.basename(ebuild_path))
	if eapi is None and \
		'parse-eapi-ebuild-head' in settings.features:
		eapi = portage._parse_eapi_ebuild_head(
			codecs.open(_unicode_encode(ebuild_path,
			encoding=_encodings['fs'], errors='strict'),
			mode='r', encoding=_encodings['repo.content'],
			errors='replace'))

	if eapi is not None:
		if not portage.eapi_is_supported(eapi):
			# Unsupported EAPI: record it and complete without spawning.
			self.metadata_callback(self.cpv, self.ebuild_path,
				self.repo_path, {'EAPI' : eapi}, self.ebuild_mtime)
			self.returncode = os.EX_OK
			self.wait()
			return

		settings.configdict['pkg']['EAPI'] = eapi

	debug = settings.get("PORTAGE_DEBUG") == "1"
	master_fd = None
	slave_fd = None
	fd_pipes = None
	if self.fd_pipes is not None:
		fd_pipes = self.fd_pipes.copy()
	else:
		fd_pipes = {}

	fd_pipes.setdefault(0, sys.stdin.fileno())
	fd_pipes.setdefault(1, sys.stdout.fileno())
	fd_pipes.setdefault(2, sys.stderr.fileno())

	# flush any pending output
	for fd in fd_pipes.values():
		if fd == sys.stdout.fileno():
			sys.stdout.flush()
		if fd == sys.stderr.fileno():
			sys.stderr.flush()

	fd_pipes_orig = fd_pipes.copy()
	self._files = self._files_dict()
	files = self._files

	# Pipe for metadata: child writes to slave_fd, we read master_fd
	# non-blocking via the scheduler.
	master_fd, slave_fd = os.pipe()
	fcntl.fcntl(master_fd, fcntl.F_SETFL,
		fcntl.fcntl(master_fd, fcntl.F_GETFL) | os.O_NONBLOCK)

	fd_pipes[self._metadata_fd] = slave_fd

	self._raw_metadata = []
	files.ebuild = os.fdopen(master_fd, 'rb')
	self._reg_id = self.scheduler.register(files.ebuild.fileno(),
		self._registered_events, self._output_handler)
	self._registered = True

	retval = portage.doebuild(ebuild_path, "depend",
		settings["ROOT"], settings, debug,
		mydbapi=self.portdb, tree="porttree",
		fd_pipes=fd_pipes, returnpid=True)

	# Close our copy of the write end; the child holds its own.
	os.close(slave_fd)

	if isinstance(retval, int):
		# doebuild failed before spawning
		self._unregister()
		self.returncode = retval
		self.wait()
		return

	self.pid = retval[0]
	# Take over tracking of the spawned pid from portage.process.
	portage.process.spawned_pids.remove(self.pid)
def _start(self):
	"""Begin the asynchronous metadata (depend) phase for self.cpv.

	Fast path: if the EAPI can be determined without sourcing the ebuild
	(GLEP 55 name or ebuild head) and it is unsupported, report metadata
	via self.metadata_callback and finish immediately.  Otherwise spawn
	`doebuild(..., "depend")` with a pipe for metadata output and register
	the read end with the scheduler.
	"""
	settings = self.settings
	settings.setcpv(self.cpv)
	ebuild_path = self.ebuild_path

	eapi = None
	if 'parse-eapi-glep-55' in settings.features:
		pf, eapi = portage._split_ebuild_name_glep55(
			os.path.basename(ebuild_path))
	if eapi is None and \
		'parse-eapi-ebuild-head' in settings.features:
		eapi = portage._parse_eapi_ebuild_head(
			codecs.open(_unicode_encode(ebuild_path,
			encoding=_encodings['fs'], errors='strict'),
			mode='r', encoding=_encodings['repo.content'],
			errors='replace'))

	if eapi is not None:
		if not portage.eapi_is_supported(eapi):
			# Unsupported EAPI: record it and complete without spawning.
			self.metadata_callback(self.cpv, self.ebuild_path,
				self.repo_path, {'EAPI': eapi}, self.ebuild_mtime)
			self.returncode = os.EX_OK
			self.wait()
			return

		settings.configdict['pkg']['EAPI'] = eapi

	debug = settings.get("PORTAGE_DEBUG") == "1"
	master_fd = None
	slave_fd = None
	fd_pipes = None
	if self.fd_pipes is not None:
		fd_pipes = self.fd_pipes.copy()
	else:
		fd_pipes = {}

	fd_pipes.setdefault(0, sys.stdin.fileno())
	fd_pipes.setdefault(1, sys.stdout.fileno())
	fd_pipes.setdefault(2, sys.stderr.fileno())

	# flush any pending output
	for fd in fd_pipes.values():
		if fd == sys.stdout.fileno():
			sys.stdout.flush()
		if fd == sys.stderr.fileno():
			sys.stderr.flush()

	fd_pipes_orig = fd_pipes.copy()
	self._files = self._files_dict()
	files = self._files

	# Pipe for metadata: child writes to slave_fd, we read master_fd
	# non-blocking via the scheduler.
	master_fd, slave_fd = os.pipe()
	fcntl.fcntl(master_fd, fcntl.F_SETFL,
		fcntl.fcntl(master_fd, fcntl.F_GETFL) | os.O_NONBLOCK)

	fd_pipes[self._metadata_fd] = slave_fd

	self._raw_metadata = []
	files.ebuild = os.fdopen(master_fd, 'rb')
	self._reg_id = self.scheduler.register(files.ebuild.fileno(),
		self._registered_events, self._output_handler)
	self._registered = True

	retval = portage.doebuild(ebuild_path, "depend",
		settings["ROOT"], settings, debug,
		mydbapi=self.portdb, tree="porttree",
		fd_pipes=fd_pipes, returnpid=True)

	# Close our copy of the write end; the child holds its own.
	os.close(slave_fd)

	if isinstance(retval, int):
		# doebuild failed before spawning
		self._unregister()
		self.returncode = retval
		self.wait()
		return

	self.pid = retval[0]
	# Take over tracking of the spawned pid from portage.process.
	portage.process.spawned_pids.remove(self.pid)