Example #1
def _finalize(mysettings, items):
    if len(items) == 0:
        return
    if len(items) == 1:
        count = _("one package")
    else:
        count = _("multiple packages")
    if "PORTAGE_ELOG_MAILURI" in mysettings:
        myrecipient = mysettings["PORTAGE_ELOG_MAILURI"].split()[0]
    else:
        myrecipient = "root@localhost"

    myfrom = mysettings.get("PORTAGE_ELOG_MAILFROM", "")
    myfrom = myfrom.replace("${HOST}", socket.getfqdn())
    mysubject = mysettings.get("PORTAGE_ELOG_MAILSUBJECT", "")
    mysubject = mysubject.replace("${PACKAGE}", count)
    mysubject = mysubject.replace("${HOST}", socket.getfqdn())

    mybody = _("elog messages for the following packages generated by "
               "process %(pid)d on host %(host)s:\n") % {
                   "pid": portage.getpid(),
                   "host": socket.getfqdn()
               }
    for key in items:
        mybody += "- %s\n" % key

    mymessage = portage.mail.create_message(myfrom,
                                            myrecipient,
                                            mysubject,
                                            mybody,
                                            attachments=list(items.values()))

    # Timeout after one minute in case send_mail() blocks indefinitely.
    try:
        try:
            AlarmSignal.register(60)
            portage.mail.send_mail(mysettings, mymessage)
        finally:
            AlarmSignal.unregister()
    except AlarmSignal:
        writemsg("Timeout in finalize() for elog system 'mail_summary'\n",
                 noiselevel=-1)
    except PortageException as e:
        writemsg("%s\n" % (e, ), noiselevel=-1)

    return
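
The try/finally pairing around AlarmSignal.register()/unregister() is a general SIGALRM timeout guard. Below is a minimal standalone sketch of the same pattern using only the standard library (Unix-only); Timeout, _on_alarm, and call_with_timeout are illustrative names, not part of portage.

import signal

class Timeout(Exception):
    """Raised when the alarm fires before func returns."""

def _on_alarm(signum, frame):
    raise Timeout()

def call_with_timeout(func, seconds, *args, **kwargs):
    # Arm SIGALRM, run func, then always disarm the alarm and restore the
    # previous handler, mirroring the register()/unregister() pairing above.
    old_handler = signal.signal(signal.SIGALRM, _on_alarm)
    signal.alarm(seconds)
    try:
        return func(*args, **kwargs)
    finally:
        signal.alarm(0)
        signal.signal(signal.SIGALRM, old_handler)
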
Example #2
def _finalize(mysettings, items):
    if len(items) == 0:
        return
    elif len(items) == 1:
        count = _("one package")
    else:
        count = _("multiple packages")
    if "PORTAGE_ELOG_MAILURI" in mysettings:
        myrecipient = mysettings["PORTAGE_ELOG_MAILURI"].split()[0]
    else:
        myrecipient = "root@localhost"

    myfrom = mysettings.get("PORTAGE_ELOG_MAILFROM", "")
    myfrom = myfrom.replace("${HOST}", socket.getfqdn())
    mysubject = mysettings.get("PORTAGE_ELOG_MAILSUBJECT", "")
    mysubject = mysubject.replace("${PACKAGE}", count)
    mysubject = mysubject.replace("${HOST}", socket.getfqdn())

    mybody = _("elog messages for the following packages generated by process %(pid)d on host %(host)s:\n") % {
        "pid": os.getpid(),
        "host": socket.getfqdn(),
    }
    for key in items:
        mybody += "- %s\n" % key

    mymessage = portage.mail.create_message(myfrom, myrecipient, mysubject, mybody, attachments=list(items.values()))

    # Timeout after one minute in case send_mail() blocks indefinitely.
    try:
        try:
            AlarmSignal.register(60)
            portage.mail.send_mail(mysettings, mymessage)
        finally:
            AlarmSignal.unregister()
    except AlarmSignal:
        writemsg("Timeout in finalize() for elog system 'mail_summary'\n", noiselevel=-1)
    except PortageException as e:
        writemsg("%s\n" % (e,), noiselevel=-1)

    return
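
Both variants expand the ${PACKAGE} and ${HOST} placeholders with plain str.replace() calls. A small illustration of that substitution, using a hypothetical settings dict in place of portage's config object:

import socket

settings = {
    "PORTAGE_ELOG_MAILSUBJECT": "[portage] elog summary for ${PACKAGE} on ${HOST}",
}

subject = settings["PORTAGE_ELOG_MAILSUBJECT"]
subject = subject.replace("${PACKAGE}", "one package")
subject = subject.replace("${HOST}", socket.getfqdn())
# e.g. "[portage] elog summary for one package on build.example.org"
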
Example #3
	def _populate_remote(self, getbinpkg_refresh=True):

		self._remote_has_index = False
		self._remotepkgs = {}
		for base_url in self.settings["PORTAGE_BINHOST"].split():
			parsed_url = urlparse(base_url)
			host = parsed_url.netloc
			port = parsed_url.port
			user = None
			passwd = None
			user_passwd = ""
			if "@" in host:
				user, host = host.split("@", 1)
				user_passwd = user + "@"
				if ":" in user:
					user, passwd = user.split(":", 1)

			if port is not None:
				port_str = ":%s" % (port,)
				if host.endswith(port_str):
					host = host[:-len(port_str)]
			pkgindex_file = os.path.join(self.settings["EROOT"], CACHE_PATH, "binhost",
				host, parsed_url.path.lstrip("/"), "Packages")
			pkgindex = self._new_pkgindex()
			try:
				f = io.open(_unicode_encode(pkgindex_file,
					encoding=_encodings['fs'], errors='strict'),
					mode='r', encoding=_encodings['repo.content'],
					errors='replace')
				try:
					pkgindex.read(f)
				finally:
					f.close()
			except EnvironmentError as e:
				if e.errno != errno.ENOENT:
					raise
			local_timestamp = pkgindex.header.get("TIMESTAMP", None)
			try:
				download_timestamp = \
					float(pkgindex.header.get("DOWNLOAD_TIMESTAMP", 0))
			except ValueError:
				download_timestamp = 0
			remote_timestamp = None
			rmt_idx = self._new_pkgindex()
			proc = None
			tmp_filename = None
			try:
				# urlparse.urljoin() only works correctly with recognized
				# protocols and requires the base url to have a trailing
				# slash, so join manually...
				url = base_url.rstrip("/") + "/Packages"
				f = None

				if not getbinpkg_refresh and local_timestamp:
					raise UseCachedCopyOfRemoteIndex()

				try:
					ttl = float(pkgindex.header.get("TTL", 0))
				except ValueError:
					pass
				else:
					if download_timestamp and ttl and \
						download_timestamp + ttl > time.time():
						raise UseCachedCopyOfRemoteIndex()

				# Don't use urlopen for https, unless
				# PEP 476 is supported (bug #469888).
				if parsed_url.scheme not in ('https',) or _have_pep_476():
					try:
						f = _urlopen(url, if_modified_since=local_timestamp)
						if hasattr(f, 'headers') and f.headers.get('timestamp', ''):
							remote_timestamp = f.headers.get('timestamp')
					except IOError as err:
						if hasattr(err, 'code') and err.code == 304: # not modified (since local_timestamp)
							raise UseCachedCopyOfRemoteIndex()

						if parsed_url.scheme in ('ftp', 'http', 'https'):
							# This protocol is supposedly supported by urlopen,
							# so apparently there's a problem with the url
							# or a bug in urlopen.
							if self.settings.get("PORTAGE_DEBUG", "0") != "0":
								traceback.print_exc()

							raise
					except ValueError:
						raise ParseError("Invalid Portage BINHOST value '%s'"
										 % url.lstrip())

				if f is None:

					path = parsed_url.path.rstrip("/") + "/Packages"

					if parsed_url.scheme == 'ssh':
						# Use a pipe so that we can terminate the download
						# early if we detect that the TIMESTAMP header
						# matches that of the cached Packages file.
						ssh_args = ['ssh']
						if port is not None:
							ssh_args.append("-p%s" % (port,))
						# NOTE: shlex evaluates embedded quotes
						ssh_args.extend(portage.util.shlex_split(
							self.settings.get("PORTAGE_SSH_OPTS", "")))
						ssh_args.append(user_passwd + host)
						ssh_args.append('--')
						ssh_args.append('cat')
						ssh_args.append(path)

						proc = subprocess.Popen(ssh_args,
							stdout=subprocess.PIPE)
						f = proc.stdout
					else:
						setting = 'FETCHCOMMAND_' + parsed_url.scheme.upper()
						fcmd = self.settings.get(setting)
						if not fcmd:
							fcmd = self.settings.get('FETCHCOMMAND')
							if not fcmd:
								raise EnvironmentError("FETCHCOMMAND is unset")

						fd, tmp_filename = tempfile.mkstemp()
						tmp_dirname, tmp_basename = os.path.split(tmp_filename)
						os.close(fd)

						fcmd_vars = {
							"DISTDIR": tmp_dirname,
							"FILE": tmp_basename,
							"URI": url
						}

						for k in ("PORTAGE_SSH_OPTS",):
							v = self.settings.get(k)
							if v is not None:
								fcmd_vars[k] = v

						success = portage.getbinpkg.file_get(
							fcmd=fcmd, fcmd_vars=fcmd_vars)
						if not success:
							raise EnvironmentError("%s failed" % (setting,))
						f = open(tmp_filename, 'rb')

				f_dec = codecs.iterdecode(f,
					_encodings['repo.content'], errors='replace')
				try:
					rmt_idx.readHeader(f_dec)
					if not remote_timestamp: # in case it had not been read from HTTP header
						remote_timestamp = rmt_idx.header.get("TIMESTAMP", None)
					if not remote_timestamp:
						# no timestamp in the header, something's wrong
						pkgindex = None
						writemsg(_("\n\n!!! Binhost package index " \
						"has no TIMESTAMP field.\n"), noiselevel=-1)
					else:
						if not self._pkgindex_version_supported(rmt_idx):
							writemsg(_("\n\n!!! Binhost package index version" \
							" is not supported: '%s'\n") % \
							rmt_idx.header.get("VERSION"), noiselevel=-1)
							pkgindex = None
						elif local_timestamp != remote_timestamp:
							rmt_idx.readBody(f_dec)
							pkgindex = rmt_idx
				finally:
					# Timeout after 5 seconds, in case close() blocks
					# indefinitely (see bug #350139).
					try:
						try:
							AlarmSignal.register(5)
							f.close()
						finally:
							AlarmSignal.unregister()
					except AlarmSignal:
						writemsg("\n\n!!! %s\n" % \
							_("Timed out while closing connection to binhost"),
							noiselevel=-1)
			except UseCachedCopyOfRemoteIndex:
				writemsg_stdout("\n")
				writemsg_stdout(
					colorize("GOOD", _("Local copy of remote index is up-to-date and will be used.")) + \
					"\n")
				rmt_idx = pkgindex
			except EnvironmentError as e:
				# This includes URLError which is raised for SSL
				# certificate errors when PEP 476 is supported.
				writemsg(_("\n\n!!! Error fetching binhost package" \
					" info from '%s'\n") % _hide_url_passwd(base_url))
				# With Python 2, the EnvironmentError message may
				# contain bytes or unicode, so use _unicode to ensure
				# safety with all locales (bug #532784).
				try:
					error_msg = _unicode(e)
				except UnicodeDecodeError as uerror:
					error_msg = _unicode(uerror.object,
						encoding='utf_8', errors='replace')
				writemsg("!!! %s\n\n" % error_msg)
				del e
				pkgindex = None
			if proc is not None:
				if proc.poll() is None:
					proc.kill()
					proc.wait()
				proc = None
			if tmp_filename is not None:
				try:
					os.unlink(tmp_filename)
				except OSError:
					pass
			if pkgindex is rmt_idx:
				pkgindex.modified = False # don't update the header
				pkgindex.header["DOWNLOAD_TIMESTAMP"] = "%d" % time.time()
				try:
					ensure_dirs(os.path.dirname(pkgindex_file))
					f = atomic_ofstream(pkgindex_file)
					pkgindex.write(f)
					f.close()
				except (IOError, PortageException):
					if os.access(os.path.dirname(pkgindex_file), os.W_OK):
						raise
					# The current user doesn't have permission to cache the
					# file, but that's alright.
			if pkgindex:
				remote_base_uri = pkgindex.header.get("URI", base_url)
				for d in pkgindex.packages:
					cpv = _pkg_str(d["CPV"], metadata=d,
						settings=self.settings)
					# Local package instances override remote instances
					# with the same instance_key.
					if self.dbapi.cpv_exists(cpv):
						continue

					d["CPV"] = cpv
					d["BASE_URI"] = remote_base_uri
					d["PKGINDEX_URI"] = url
					self._remotepkgs[self.dbapi._instance_key(cpv)] = d
					self.dbapi.cpv_inject(cpv)

				self._remote_has_index = True
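
The TTL handling above decides whether the remote Packages index must be refetched: the cached copy is reused while DOWNLOAD_TIMESTAMP + TTL still lies in the future. A standalone sketch of that check follows; cache_is_fresh is a hypothetical helper name, since _populate_remote() inlines this logic.

import time

def cache_is_fresh(header, now=None):
    # header is a parsed Packages header dict. Both fields default to 0, so
    # a missing TTL or DOWNLOAD_TIMESTAMP disables the cache short-circuit.
    now = time.time() if now is None else now
    try:
        download_timestamp = float(header.get("DOWNLOAD_TIMESTAMP", 0))
        ttl = float(header.get("TTL", 0))
    except ValueError:
        return False
    return bool(download_timestamp and ttl and download_timestamp + ttl > now)
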
Example #4
def elog_process(cpv, mysettings, phasefilter=None):
    global _elog_atexit_handlers

    logsystems = mysettings.get("PORTAGE_ELOG_SYSTEM", "").split()
    for s in logsystems:
        # allow per module overrides of PORTAGE_ELOG_CLASSES
        if ":" in s:
            s, levels = s.split(":", 1)
            levels = levels.split(",")
        # - is nicer than _ for module names, so allow people to use it.
        s = s.replace("-", "_")
        try:
            _load_mod("portage.elog.mod_" + s)
        except ImportError:
            pass

    if "T" in mysettings:
        ebuild_logentries = collect_ebuild_messages(
            os.path.join(mysettings["T"], "logging"))
    else:
        # A build dir isn't necessarily required since the messages.e*
        # functions allow messages to be generated in-memory.
        ebuild_logentries = {}
    all_logentries = collect_messages(key=cpv, phasefilter=phasefilter)
    if cpv in all_logentries:
        # Messages generated by the python elog implementation are assumed
        # to come first. For example, this ensures correct order for einfo
        # messages that are generated prior to the setup phase.
        all_logentries[cpv] = \
            _merge_logentries(all_logentries[cpv], ebuild_logentries)
    else:
        all_logentries[cpv] = ebuild_logentries

    my_elog_classes = set(mysettings.get("PORTAGE_ELOG_CLASSES", "").split())
    logsystems = {}
    for token in mysettings.get("PORTAGE_ELOG_SYSTEM", "").split():
        if ":" in token:
            s, levels = token.split(":", 1)
            levels = levels.split(",")
        else:
            s = token
            levels = ()
        levels_set = logsystems.get(s)
        if levels_set is None:
            levels_set = set()
            logsystems[s] = levels_set
        levels_set.update(levels)

    for key in all_logentries:
        default_logentries = filter_loglevels(all_logentries[key],
                                              my_elog_classes)

        # in case the filters matched all messages and no module overrides exist
        if len(default_logentries) == 0 and ":" not in mysettings.get(
                "PORTAGE_ELOG_SYSTEM", ""):
            continue

        default_fulllog = _combine_logentries(default_logentries)

        # call listeners
        for listener in _elog_listeners:
            listener(mysettings, str(key), default_logentries, default_fulllog)

        # pass the processing to the individual modules
        for s, levels in logsystems.items():
            # allow per module overrides of PORTAGE_ELOG_CLASSES
            if levels:
                mod_logentries = filter_loglevels(all_logentries[key], levels)
                mod_fulllog = _combine_logentries(mod_logentries)
            else:
                mod_logentries = default_logentries
                mod_fulllog = default_fulllog
            if len(mod_logentries) == 0:
                continue
            # - is nicer than _ for module names, so allow people to use it.
            s = s.replace("-", "_")
            try:
                m = _load_mod("portage.elog.mod_" + s)
                # Timeout after one minute (in case something like the mail
                # module gets hung).
                try:
                    AlarmSignal.register(60)
                    m.process(mysettings, str(key), mod_logentries,
                              mod_fulllog)
                finally:
                    AlarmSignal.unregister()
                if hasattr(m, "finalize") and m.finalize not in _elog_atexit_handlers:
                    _elog_atexit_handlers.append(m.finalize)
                    atexit_register(m.finalize)
            except (ImportError, AttributeError) as e:
                writemsg(
                    _("!!! Error while importing logging modules "
                      "while loading \"mod_%s\":\n") % str(s))
                writemsg("%s\n" % str(e), noiselevel=-1)
            except AlarmSignal:
                writemsg("Timeout in elog_process for system '%s'\n" % s,
                         noiselevel=-1)
            except PortageException as e:
                writemsg("%s\n" % str(e), noiselevel=-1)
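
PORTAGE_ELOG_SYSTEM, parsed twice above, is a whitespace-separated list of module names, each optionally followed by a colon and comma-separated log classes that override PORTAGE_ELOG_CLASSES for that module. Below is a standalone sketch of the tokenizer; parse_elog_systems is a hypothetical name for logic the function performs inline.

def parse_elog_systems(value):
    # "mail:warn,error save" -> {"mail": {"warn", "error"}, "save": set()}
    logsystems = {}
    for token in value.split():
        if ":" in token:
            s, levels = token.split(":", 1)
            levels = levels.split(",")
        else:
            s, levels = token, ()
        logsystems.setdefault(s, set()).update(levels)
    return logsystems
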
Example #5
	def _populate(self, getbinpkgs=0):
		if (not os.path.isdir(self.pkgdir) and not getbinpkgs):
			return 0

		# Clear all caches in case populate is called multiple times
		# as may be the case when _global_updates calls populate()
		# prior to performing package moves since it only wants to
		# operate on local packages (getbinpkgs=0).
		self._remotepkgs = None
		self.dbapi.clear()
		_instance_key = self.dbapi._instance_key
		if True:
			pkg_paths = {}
			self._pkg_paths = pkg_paths
			dir_files = {}
			for parent, dir_names, file_names in os.walk(self.pkgdir):
				relative_parent = parent[len(self.pkgdir)+1:]
				dir_files[relative_parent] = file_names

			pkgindex = self._load_pkgindex()
			if not self._pkgindex_version_supported(pkgindex):
				pkgindex = self._new_pkgindex()
			header = pkgindex.header
			metadata = {}
			basename_index = {}
			for d in pkgindex.packages:
				cpv = _pkg_str(d["CPV"], metadata=d,
					settings=self.settings)
				d["CPV"] = cpv
				metadata[_instance_key(cpv)] = d
				path = d.get("PATH")
				if not path:
					path = cpv + ".tbz2"
				basename = os.path.basename(path)
				basename_index.setdefault(basename, []).append(d)

			update_pkgindex = False
			for mydir, file_names in dir_files.items():
				try:
					mydir = _unicode_decode(mydir,
						encoding=_encodings["fs"], errors="strict")
				except UnicodeDecodeError:
					continue
				for myfile in file_names:
					try:
						myfile = _unicode_decode(myfile,
							encoding=_encodings["fs"], errors="strict")
					except UnicodeDecodeError:
						continue
					if not myfile.endswith(SUPPORTED_XPAK_EXTENSIONS):
						continue
					mypath = os.path.join(mydir, myfile)
					full_path = os.path.join(self.pkgdir, mypath)
					s = os.lstat(full_path)

					if not stat.S_ISREG(s.st_mode):
						continue

					# Validate data from the package index and try to avoid
					# reading the xpak if possible.
					possibilities = basename_index.get(myfile)
					if possibilities:
						match = None
						for d in possibilities:
							try:
								if long(d["_mtime_"]) != s[stat.ST_MTIME]:
									continue
							except (KeyError, ValueError):
								continue
							try:
								if long(d["SIZE"]) != long(s.st_size):
									continue
							except (KeyError, ValueError):
								continue
							if not self._pkgindex_keys.difference(d):
								match = d
								break
						if match:
							mycpv = match["CPV"]
							instance_key = _instance_key(mycpv)
							pkg_paths[instance_key] = mypath
							# update the path if the package has been moved
							oldpath = d.get("PATH")
							if oldpath and oldpath != mypath:
								update_pkgindex = True
							# Omit PATH if it is the default path for
							# the current Packages format version.
							if mypath != mycpv + ".tbz2":
								d["PATH"] = mypath
								if not oldpath:
									update_pkgindex = True
							else:
								d.pop("PATH", None)
								if oldpath:
									update_pkgindex = True
							self.dbapi.cpv_inject(mycpv)
							continue
					if not os.access(full_path, os.R_OK):
						writemsg(_("!!! Permission denied to read " \
							"binary package: '%s'\n") % full_path,
							noiselevel=-1)
						self.invalids.append(myfile[:-5])
						continue
					pkg_metadata = self._read_metadata(full_path, s,
						keys=chain(self.dbapi._aux_cache_keys,
						("PF", "CATEGORY")))
					mycat = pkg_metadata.get("CATEGORY", "")
					mypf = pkg_metadata.get("PF", "")
					slot = pkg_metadata.get("SLOT", "")
					mypkg = myfile[:-5]
					if not mycat or not mypf or not slot:
						#old-style or corrupt package
						writemsg(_("\n!!! Invalid binary package: '%s'\n") % full_path,
							noiselevel=-1)
						missing_keys = []
						if not mycat:
							missing_keys.append("CATEGORY")
						if not mypf:
							missing_keys.append("PF")
						if not slot:
							missing_keys.append("SLOT")
						msg = []
						if missing_keys:
							missing_keys.sort()
							msg.append(_("Missing metadata key(s): %s.") % \
								", ".join(missing_keys))
						msg.append(_(" This binary package is not " \
							"recoverable and should be deleted."))
						for line in textwrap.wrap("".join(msg), 72):
							writemsg("!!! %s\n" % line, noiselevel=-1)
						self.invalids.append(mypkg)
						continue

					multi_instance = False
					invalid_name = False
					build_id = None
					if myfile.endswith(".xpak"):
						multi_instance = True
						build_id = self._parse_build_id(myfile)
						if build_id < 1:
							invalid_name = True
						elif myfile != "%s-%s.xpak" % (
							mypf, build_id):
							invalid_name = True
						else:
							mypkg = mypkg[:-len(str(build_id))-1]
					elif myfile != mypf + ".tbz2":
						invalid_name = True

					if invalid_name:
						writemsg(_("\n!!! Binary package name is "
							"invalid: '%s'\n") % full_path,
							noiselevel=-1)
						continue

					if pkg_metadata.get("BUILD_ID"):
						try:
							build_id = long(pkg_metadata["BUILD_ID"])
						except ValueError:
							writemsg(_("!!! Binary package has "
								"invalid BUILD_ID: '%s'\n") %
								full_path, noiselevel=-1)
							continue
					else:
						build_id = None

					if multi_instance:
						name_split = catpkgsplit("%s/%s" %
							(mycat, mypf))
						if (name_split is None or
							tuple(catsplit(mydir)) != name_split[:2]):
							continue
					elif mycat != mydir and mydir != "All":
						continue
					if mypkg != mypf.strip():
						continue
					mycpv = mycat + "/" + mypkg
					if not self.dbapi._category_re.match(mycat):
						writemsg(_("!!! Binary package has an " \
							"unrecognized category: '%s'\n") % full_path,
							noiselevel=-1)
						writemsg(_("!!! '%s' has a category that is not" \
							" listed in %setc/portage/categories\n") % \
							(mycpv, self.settings["PORTAGE_CONFIGROOT"]),
							noiselevel=-1)
						continue
					if build_id is not None:
						pkg_metadata["BUILD_ID"] = _unicode(build_id)
					pkg_metadata["SIZE"] = _unicode(s.st_size)
					# Discard items used only for validation above.
					pkg_metadata.pop("CATEGORY")
					pkg_metadata.pop("PF")
					mycpv = _pkg_str(mycpv,
						metadata=self.dbapi._aux_cache_slot_dict(
						pkg_metadata))
					pkg_paths[_instance_key(mycpv)] = mypath
					self.dbapi.cpv_inject(mycpv)
					update_pkgindex = True
					d = metadata.get(_instance_key(mycpv),
						pkgindex._pkg_slot_dict())
					if d:
						try:
							if long(d["_mtime_"]) != s[stat.ST_MTIME]:
								d.clear()
						except (KeyError, ValueError):
							d.clear()
					if d:
						try:
							if long(d["SIZE"]) != long(s.st_size):
								d.clear()
						except (KeyError, ValueError):
							d.clear()

					for k in self._pkgindex_allowed_pkg_keys:
						v = pkg_metadata.get(k)
						if v is not None:
							d[k] = v
					d["CPV"] = mycpv

					try:
						self._eval_use_flags(mycpv, d)
					except portage.exception.InvalidDependString:
						writemsg(_("!!! Invalid binary package: '%s'\n") % \
							self.getname(mycpv), noiselevel=-1)
						self.dbapi.cpv_remove(mycpv)
						del pkg_paths[_instance_key(mycpv)]

					# record location if it's non-default
					if mypath != mycpv + ".tbz2":
						d["PATH"] = mypath
					else:
						d.pop("PATH", None)
					metadata[_instance_key(mycpv)] = d

			for instance_key in list(metadata):
				if instance_key not in pkg_paths:
					del metadata[instance_key]

			# Do not bother to write the Packages index if $PKGDIR/All/ exists
			# since it will provide no benefit due to the need to read CATEGORY
			# from xpak.
			if update_pkgindex and os.access(self.pkgdir, os.W_OK):
				del pkgindex.packages[:]
				pkgindex.packages.extend(iter(metadata.values()))
				self._update_pkgindex_header(pkgindex.header)
				self._pkgindex_write(pkgindex)

		if getbinpkgs and not self.settings.get("PORTAGE_BINHOST"):
			writemsg(_("!!! PORTAGE_BINHOST unset, but use is requested.\n"),
				noiselevel=-1)

		if not getbinpkgs or 'PORTAGE_BINHOST' not in self.settings:
			self.populated=1
			return
		self._remotepkgs = {}
		for base_url in self.settings["PORTAGE_BINHOST"].split():
			parsed_url = urlparse(base_url)
			host = parsed_url.netloc
			port = parsed_url.port
			user = None
			passwd = None
			user_passwd = ""
			if "@" in host:
				user, host = host.split("@", 1)
				user_passwd = user + "@"
				if ":" in user:
					user, passwd = user.split(":", 1)
			port_args = []
			if port is not None:
				port_str = ":%s" % (port,)
				if host.endswith(port_str):
					host = host[:-len(port_str)]
			pkgindex_file = os.path.join(self.settings["EROOT"], CACHE_PATH, "binhost",
				host, parsed_url.path.lstrip("/"), "Packages")
			pkgindex = self._new_pkgindex()
			try:
				f = io.open(_unicode_encode(pkgindex_file,
					encoding=_encodings['fs'], errors='strict'),
					mode='r', encoding=_encodings['repo.content'],
					errors='replace')
				try:
					pkgindex.read(f)
				finally:
					f.close()
			except EnvironmentError as e:
				if e.errno != errno.ENOENT:
					raise
			local_timestamp = pkgindex.header.get("TIMESTAMP", None)
			try:
				download_timestamp = \
					float(pkgindex.header.get("DOWNLOAD_TIMESTAMP", 0))
			except ValueError:
				download_timestamp = 0
			remote_timestamp = None
			rmt_idx = self._new_pkgindex()
			proc = None
			tmp_filename = None
			try:
				# urlparse.urljoin() only works correctly with recognized
				# protocols and requires the base url to have a trailing
				# slash, so join manually...
				url = base_url.rstrip("/") + "/Packages"
				f = None

				try:
					ttl = float(pkgindex.header.get("TTL", 0))
				except ValueError:
					pass
				else:
					if download_timestamp and ttl and \
						download_timestamp + ttl > time.time():
						raise UseCachedCopyOfRemoteIndex()

				# Don't use urlopen for https, since it doesn't support
				# certificate/hostname verification (bug #469888).
				if parsed_url.scheme not in ('https',):
					try:
						f = _urlopen(url, if_modified_since=local_timestamp)
						if hasattr(f, 'headers') and f.headers.get('timestamp', ''):
							remote_timestamp = f.headers.get('timestamp')
					except IOError as err:
						if hasattr(err, 'code') and err.code == 304: # not modified (since local_timestamp)
							raise UseCachedCopyOfRemoteIndex()

						if parsed_url.scheme in ('ftp', 'http', 'https'):
							# This protocol is supposedly supported by urlopen,
							# so apparently there's a problem with the url
							# or a bug in urlopen.
							if self.settings.get("PORTAGE_DEBUG", "0") != "0":
								traceback.print_exc()

							raise
					except ValueError:
						raise ParseError("Invalid Portage BINHOST value '%s'"
										 % url.lstrip())

				if f is None:

					path = parsed_url.path.rstrip("/") + "/Packages"

					if parsed_url.scheme == 'ssh':
						# Use a pipe so that we can terminate the download
						# early if we detect that the TIMESTAMP header
						# matches that of the cached Packages file.
						ssh_args = ['ssh']
						if port is not None:
							ssh_args.append("-p%s" % (port,))
						# NOTE: shlex evaluates embedded quotes
						ssh_args.extend(portage.util.shlex_split(
							self.settings.get("PORTAGE_SSH_OPTS", "")))
						ssh_args.append(user_passwd + host)
						ssh_args.append('--')
						ssh_args.append('cat')
						ssh_args.append(path)

						proc = subprocess.Popen(ssh_args,
							stdout=subprocess.PIPE)
						f = proc.stdout
					else:
						setting = 'FETCHCOMMAND_' + parsed_url.scheme.upper()
						fcmd = self.settings.get(setting)
						if not fcmd:
							fcmd = self.settings.get('FETCHCOMMAND')
							if not fcmd:
								raise EnvironmentError("FETCHCOMMAND is unset")

						fd, tmp_filename = tempfile.mkstemp()
						tmp_dirname, tmp_basename = os.path.split(tmp_filename)
						os.close(fd)

						fcmd_vars = {
							"DISTDIR": tmp_dirname,
							"FILE": tmp_basename,
							"URI": url
						}

						for k in ("PORTAGE_SSH_OPTS",):
							v = self.settings.get(k)
							if v is not None:
								fcmd_vars[k] = v

						success = portage.getbinpkg.file_get(
							fcmd=fcmd, fcmd_vars=fcmd_vars)
						if not success:
							raise EnvironmentError("%s failed" % (setting,))
						f = open(tmp_filename, 'rb')

				f_dec = codecs.iterdecode(f,
					_encodings['repo.content'], errors='replace')
				try:
					rmt_idx.readHeader(f_dec)
					if not remote_timestamp: # in case it had not been read from HTTP header
						remote_timestamp = rmt_idx.header.get("TIMESTAMP", None)
					if not remote_timestamp:
						# no timestamp in the header, something's wrong
						pkgindex = None
						writemsg(_("\n\n!!! Binhost package index " \
						"has no TIMESTAMP field.\n"), noiselevel=-1)
					else:
						if not self._pkgindex_version_supported(rmt_idx):
							writemsg(_("\n\n!!! Binhost package index version" \
							" is not supported: '%s'\n") % \
							rmt_idx.header.get("VERSION"), noiselevel=-1)
							pkgindex = None
						elif local_timestamp != remote_timestamp:
							rmt_idx.readBody(f_dec)
							pkgindex = rmt_idx
				finally:
					# Timeout after 5 seconds, in case close() blocks
					# indefinitely (see bug #350139).
					try:
						try:
							AlarmSignal.register(5)
							f.close()
						finally:
							AlarmSignal.unregister()
					except AlarmSignal:
						writemsg("\n\n!!! %s\n" % \
							_("Timed out while closing connection to binhost"),
							noiselevel=-1)
			except UseCachedCopyOfRemoteIndex:
				writemsg_stdout("\n")
				writemsg_stdout(
					colorize("GOOD", _("Local copy of remote index is up-to-date and will be used.")) + \
					"\n")
				rmt_idx = pkgindex
			except EnvironmentError as e:
				writemsg(_("\n\n!!! Error fetching binhost package" \
					" info from '%s'\n") % _hide_url_passwd(base_url))
				# With Python 2, the EnvironmentError message may
				# contain bytes or unicode, so use _unicode to ensure
				# safety with all locales (bug #532784).
				try:
					error_msg = _unicode(e)
				except UnicodeDecodeError as uerror:
					error_msg = _unicode(uerror.object,
						encoding='utf_8', errors='replace')
				writemsg("!!! %s\n\n" % error_msg)
				del e
				pkgindex = None
			if proc is not None:
				if proc.poll() is None:
					proc.kill()
					proc.wait()
				proc = None
			if tmp_filename is not None:
				try:
					os.unlink(tmp_filename)
				except OSError:
					pass
			if pkgindex is rmt_idx:
				pkgindex.modified = False # don't update the header
				pkgindex.header["DOWNLOAD_TIMESTAMP"] = "%d" % time.time()
				try:
					ensure_dirs(os.path.dirname(pkgindex_file))
					f = atomic_ofstream(pkgindex_file)
					pkgindex.write(f)
					f.close()
				except (IOError, PortageException):
					if os.access(os.path.dirname(pkgindex_file), os.W_OK):
						raise
					# The current user doesn't have permission to cache the
					# file, but that's alright.
			if pkgindex:
				remote_base_uri = pkgindex.header.get("URI", base_url)
				for d in pkgindex.packages:
					cpv = _pkg_str(d["CPV"], metadata=d,
						settings=self.settings)
					instance_key = _instance_key(cpv)
					# Local package instances override remote instances
					# with the same instance_key.
					if instance_key in metadata:
						continue

					d["CPV"] = cpv
					d["BASE_URI"] = remote_base_uri
					d["PKGINDEX_URI"] = url
					self._remotepkgs[instance_key] = d
					metadata[instance_key] = d
					self.dbapi.cpv_inject(cpv)

				self._remote_has_index = True

		self.populated=1
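
Each PORTAGE_BINHOST entry is pulled apart by hand because the netloc may embed user:password credentials and an explicit port. A standalone sketch of that parsing follows; split_binhost_url is a hypothetical helper name.

from urllib.parse import urlparse

def split_binhost_url(base_url):
    parsed = urlparse(base_url)
    host = parsed.netloc
    port = parsed.port
    user = passwd = None
    user_passwd = ""
    if "@" in host:
        user, host = host.split("@", 1)
        user_passwd = user + "@"
        if ":" in user:
            user, passwd = user.split(":", 1)
    # urlparse keeps ":port" inside netloc, so strip it from the host part.
    if port is not None:
        port_str = ":%s" % (port,)
        if host.endswith(port_str):
            host = host[:-len(port_str)]
    return user, passwd, user_passwd, host, port

# split_binhost_url("ssh://user:secret@example.org:2222/pkgs")
# -> ("user", "secret", "user:secret@", "example.org", 2222)
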
Example #6
	def _populate(self, getbinpkgs=0):
		if (not os.path.isdir(self.pkgdir) and not getbinpkgs):
			return 0

		# Clear all caches in case populate is called multiple times
		# as may be the case when _global_updates calls populate()
		# prior to performing package moves since it only wants to
		# operate on local packages (getbinpkgs=0).
		self._remotepkgs = None
		self.dbapi._clear_cache()
		self.dbapi._aux_cache.clear()
		if True:
			pkg_paths = {}
			self._pkg_paths = pkg_paths
			dirs = listdir(self.pkgdir, dirsonly=True, EmptyOnError=True)
			if "All" in dirs:
				dirs.remove("All")
			dirs.sort()
			dirs.insert(0, "All")
			pkgindex = self._load_pkgindex()
			pf_index = None
			if not self._pkgindex_version_supported(pkgindex):
				pkgindex = self._new_pkgindex()
			header = pkgindex.header
			metadata = {}
			for d in pkgindex.packages:
				metadata[d["CPV"]] = d
			update_pkgindex = False
			for mydir in dirs:
				for myfile in listdir(os.path.join(self.pkgdir, mydir)):
					if not myfile.endswith(".tbz2"):
						continue
					mypath = os.path.join(mydir, myfile)
					full_path = os.path.join(self.pkgdir, mypath)
					s = os.lstat(full_path)
					if stat.S_ISLNK(s.st_mode):
						continue

					# Validate data from the package index and try to avoid
					# reading the xpak if possible.
					if mydir != "All":
						possibilities = None
						d = metadata.get(mydir+"/"+myfile[:-5])
						if d:
							possibilities = [d]
					else:
						if pf_index is None:
							pf_index = {}
							for mycpv in metadata:
								mycat, mypf = catsplit(mycpv)
								pf_index.setdefault(
									mypf, []).append(metadata[mycpv])
						possibilities = pf_index.get(myfile[:-5])
					if possibilities:
						match = None
						for d in possibilities:
							try:
								if long(d["MTIME"]) != s[stat.ST_MTIME]:
									continue
							except (KeyError, ValueError):
								continue
							try:
								if long(d["SIZE"]) != long(s.st_size):
									continue
							except (KeyError, ValueError):
								continue
							if not self._pkgindex_keys.difference(d):
								match = d
								break
						if match:
							mycpv = match["CPV"]
							if mycpv in pkg_paths:
								# discard duplicates (All/ is preferred)
								continue
							mycpv = _pkg_str(mycpv)
							pkg_paths[mycpv] = mypath
							# update the path if the package has been moved
							oldpath = d.get("PATH")
							if oldpath and oldpath != mypath:
								update_pkgindex = True
							if mypath != mycpv + ".tbz2":
								d["PATH"] = mypath
								if not oldpath:
									update_pkgindex = True
							else:
								d.pop("PATH", None)
								if oldpath:
									update_pkgindex = True
							self.dbapi.cpv_inject(mycpv)
							if not self.dbapi._aux_cache_keys.difference(d):
								aux_cache = self.dbapi._aux_cache_slot_dict()
								for k in self.dbapi._aux_cache_keys:
									aux_cache[k] = d[k]
								self.dbapi._aux_cache[mycpv] = aux_cache
							continue
					if not os.access(full_path, os.R_OK):
						writemsg(_("!!! Permission denied to read " \
							"binary package: '%s'\n") % full_path,
							noiselevel=-1)
						self.invalids.append(myfile[:-5])
						continue
					metadata_bytes = portage.xpak.tbz2(full_path).get_data()
					mycat = _unicode_decode(metadata_bytes.get(b"CATEGORY", ""),
						encoding=_encodings['repo.content'], errors='replace')
					mypf = _unicode_decode(metadata_bytes.get(b"PF", ""),
						encoding=_encodings['repo.content'], errors='replace')
					slot = _unicode_decode(metadata_bytes.get(b"SLOT", ""),
						encoding=_encodings['repo.content'], errors='replace')
					mypkg = myfile[:-5]
					if not mycat or not mypf or not slot:
						#old-style or corrupt package
						writemsg(_("\n!!! Invalid binary package: '%s'\n") % full_path,
							noiselevel=-1)
						missing_keys = []
						if not mycat:
							missing_keys.append("CATEGORY")
						if not mypf:
							missing_keys.append("PF")
						if not slot:
							missing_keys.append("SLOT")
						msg = []
						if missing_keys:
							missing_keys.sort()
							msg.append(_("Missing metadata key(s): %s.") % \
								", ".join(missing_keys))
						msg.append(_(" This binary package is not " \
							"recoverable and should be deleted."))
						for line in textwrap.wrap("".join(msg), 72):
							writemsg("!!! %s\n" % line, noiselevel=-1)
						self.invalids.append(mypkg)
						continue
					mycat = mycat.strip()
					slot = slot.strip()
					if mycat != mydir and mydir != "All":
						continue
					if mypkg != mypf.strip():
						continue
					mycpv = mycat + "/" + mypkg
					if mycpv in pkg_paths:
						# All is first, so it's preferred.
						continue
					if not self.dbapi._category_re.match(mycat):
						writemsg(_("!!! Binary package has an " \
							"unrecognized category: '%s'\n") % full_path,
							noiselevel=-1)
						writemsg(_("!!! '%s' has a category that is not" \
							" listed in %setc/portage/categories\n") % \
							(mycpv, self.settings["PORTAGE_CONFIGROOT"]),
							noiselevel=-1)
						continue
					mycpv = _pkg_str(mycpv)
					pkg_paths[mycpv] = mypath
					self.dbapi.cpv_inject(mycpv)
					update_pkgindex = True
					d = metadata.get(mycpv, {})
					if d:
						try:
							if long(d["MTIME"]) != s[stat.ST_MTIME]:
								d.clear()
						except (KeyError, ValueError):
							d.clear()
					if d:
						try:
							if long(d["SIZE"]) != long(s.st_size):
								d.clear()
						except (KeyError, ValueError):
							d.clear()

					d["CPV"] = mycpv
					d["SLOT"] = slot
					d["MTIME"] = str(s[stat.ST_MTIME])
					d["SIZE"] = str(s.st_size)

					d.update(zip(self._pkgindex_aux_keys,
						self.dbapi.aux_get(mycpv, self._pkgindex_aux_keys)))
					try:
						self._eval_use_flags(mycpv, d)
					except portage.exception.InvalidDependString:
						writemsg(_("!!! Invalid binary package: '%s'\n") % \
							self.getname(mycpv), noiselevel=-1)
						self.dbapi.cpv_remove(mycpv)
						del pkg_paths[mycpv]

					# record location if it's non-default
					if mypath != mycpv + ".tbz2":
						d["PATH"] = mypath
					else:
						d.pop("PATH", None)
					metadata[mycpv] = d
					if not self.dbapi._aux_cache_keys.difference(d):
						aux_cache = self.dbapi._aux_cache_slot_dict()
						for k in self.dbapi._aux_cache_keys:
							aux_cache[k] = d[k]
						self.dbapi._aux_cache[mycpv] = aux_cache

			for cpv in list(metadata):
				if cpv not in pkg_paths:
					del metadata[cpv]

			# Do not bother to write the Packages index if $PKGDIR/All/ exists
			# since it will provide no benefit due to the need to read CATEGORY
			# from xpak.
			if update_pkgindex and os.access(self.pkgdir, os.W_OK):
				del pkgindex.packages[:]
				pkgindex.packages.extend(iter(metadata.values()))
				self._update_pkgindex_header(pkgindex.header)
				self._pkgindex_write(pkgindex)

		if getbinpkgs and not self.settings["PORTAGE_BINHOST"]:
			writemsg(_("!!! PORTAGE_BINHOST unset, but use is requested.\n"),
				noiselevel=-1)

		if not getbinpkgs or 'PORTAGE_BINHOST' not in self.settings:
			self.populated=1
			return
		self._remotepkgs = {}
		for base_url in self.settings["PORTAGE_BINHOST"].split():
			parsed_url = urlparse(base_url)
			host = parsed_url.netloc
			port = parsed_url.port
			user = None
			passwd = None
			user_passwd = ""
			if "@" in host:
				user, host = host.split("@", 1)
				user_passwd = user + "@"
				if ":" in user:
					user, passwd = user.split(":", 1)
			port_args = []
			if port is not None:
				port_str = ":%s" % (port,)
				if host.endswith(port_str):
					host = host[:-len(port_str)]
			pkgindex_file = os.path.join(self.settings["EROOT"], CACHE_PATH, "binhost",
				host, parsed_url.path.lstrip("/"), "Packages")
			pkgindex = self._new_pkgindex()
			try:
				f = io.open(_unicode_encode(pkgindex_file,
					encoding=_encodings['fs'], errors='strict'),
					mode='r', encoding=_encodings['repo.content'],
					errors='replace')
				try:
					pkgindex.read(f)
				finally:
					f.close()
			except EnvironmentError as e:
				if e.errno != errno.ENOENT:
					raise
			local_timestamp = pkgindex.header.get("TIMESTAMP", None)
			remote_timestamp = None
			rmt_idx = self._new_pkgindex()
			proc = None
			tmp_filename = None
			try:
				# urlparse.urljoin() only works correctly with recognized
				# protocols and requires the base url to have a trailing
				# slash, so join manually...
				url = base_url.rstrip("/") + "/Packages"
				try:
					f = _urlopen(url, if_modified_since=local_timestamp)
					if hasattr(f, 'headers') and f.headers.get('timestamp', ''):
						remote_timestamp = f.headers.get('timestamp')
				except IOError as err:
					if hasattr(err, 'code') and err.code == 304: # not modified (since local_timestamp)
						raise UseCachedCopyOfRemoteIndex()

					path = parsed_url.path.rstrip("/") + "/Packages"

					if parsed_url.scheme == 'sftp':
						# The sftp command complains about 'Illegal seek' if
						# we try to make it write to /dev/stdout, so use a
						# temp file instead.
						fd, tmp_filename = tempfile.mkstemp()
						os.close(fd)
						if port is not None:
							port_args = ['-P', "%s" % (port,)]
						proc = subprocess.Popen(['sftp'] + port_args + \
							[user_passwd + host + ":" + path, tmp_filename])
						if proc.wait() != os.EX_OK:
							raise
						f = open(tmp_filename, 'rb')
					elif parsed_url.scheme == 'ssh':
						if port is not None:
							port_args = ['-p', "%s" % (port,)]
						proc = subprocess.Popen(['ssh'] + port_args + \
							[user_passwd + host, '--', 'cat', path],
							stdout=subprocess.PIPE)
						f = proc.stdout
					else:
						setting = 'FETCHCOMMAND_' + parsed_url.scheme.upper()
						fcmd = self.settings.get(setting)
						if not fcmd:
							raise
						fd, tmp_filename = tempfile.mkstemp()
						tmp_dirname, tmp_basename = os.path.split(tmp_filename)
						os.close(fd)
						success = portage.getbinpkg.file_get(url,
							tmp_dirname, fcmd=fcmd, filename=tmp_basename)
						if not success:
							raise EnvironmentError("%s failed" % (setting,))
						f = open(tmp_filename, 'rb')

				f_dec = codecs.iterdecode(f,
					_encodings['repo.content'], errors='replace')
				try:
					rmt_idx.readHeader(f_dec)
					if not remote_timestamp: # in case it had not been read from HTTP header
						remote_timestamp = rmt_idx.header.get("TIMESTAMP", None)
					if not remote_timestamp:
						# no timestamp in the header, something's wrong
						pkgindex = None
						writemsg(_("\n\n!!! Binhost package index " \
						"has no TIMESTAMP field.\n"), noiselevel=-1)
					else:
						if not self._pkgindex_version_supported(rmt_idx):
							writemsg(_("\n\n!!! Binhost package index version" \
							" is not supported: '%s'\n") % \
							rmt_idx.header.get("VERSION"), noiselevel=-1)
							pkgindex = None
						elif local_timestamp != remote_timestamp:
							rmt_idx.readBody(f_dec)
							pkgindex = rmt_idx
				finally:
					# Timeout after 5 seconds, in case close() blocks
					# indefinitely (see bug #350139).
					try:
						try:
							AlarmSignal.register(5)
							f.close()
						finally:
							AlarmSignal.unregister()
					except AlarmSignal:
						writemsg("\n\n!!! %s\n" % \
							_("Timed out while closing connection to binhost"),
							noiselevel=-1)
			except UseCachedCopyOfRemoteIndex:
				writemsg_stdout("\n")
				writemsg_stdout(
					colorize("GOOD", _("Local copy of remote index is up-to-date and will be used.")) + \
					"\n")
				rmt_idx = pkgindex
			except EnvironmentError as e:
				writemsg(_("\n\n!!! Error fetching binhost package" \
					" info from '%s'\n") % _hide_url_passwd(base_url))
				writemsg("!!! %s\n\n" % str(e))
				del e
				pkgindex = None
			if proc is not None:
				if proc.poll() is None:
					proc.kill()
					proc.wait()
				proc = None
			if tmp_filename is not None:
				try:
					os.unlink(tmp_filename)
				except OSError:
					pass
			if pkgindex is rmt_idx:
				pkgindex.modified = False # don't update the header
				try:
					ensure_dirs(os.path.dirname(pkgindex_file))
					f = atomic_ofstream(pkgindex_file)
					pkgindex.write(f)
					f.close()
				except (IOError, PortageException):
					if os.access(os.path.dirname(pkgindex_file), os.W_OK):
						raise
					# The current user doesn't have permission to cache the
					# file, but that's alright.
			if pkgindex:
				# Organize remote package list as a cpv -> metadata map.
				remotepkgs = _pkgindex_cpv_map_latest_build(pkgindex)
				remote_base_uri = pkgindex.header.get("URI", base_url)
				for cpv, remote_metadata in remotepkgs.items():
					remote_metadata["BASE_URI"] = remote_base_uri
					self._pkgindex_uri[cpv] = url
				self._remotepkgs.update(remotepkgs)
				self._remote_has_index = True
				for cpv in remotepkgs:
					self.dbapi.cpv_inject(cpv)
				if True:  # always true, so the "else" branch below is unreachable
					# Remote package instances override local package
					# if they are not identical.
					hash_names = ["SIZE"] + self._pkgindex_hashes
					for cpv, local_metadata in metadata.items():
						remote_metadata = self._remotepkgs.get(cpv)
						if remote_metadata is None:
							continue
						# Use digests to compare identity.
						identical = True
						for hash_name in hash_names:
							local_value = local_metadata.get(hash_name)
							if local_value is None:
								continue
							remote_value = remote_metadata.get(hash_name)
							if remote_value is None:
								continue
							if local_value != remote_value:
								identical = False
								break
						if identical:
							del self._remotepkgs[cpv]
						else:
							# Override the local package in the aux_get cache.
							self.dbapi._aux_cache[cpv] = remote_metadata
				else:
					# Local package instances override remote instances.
					for cpv in metadata:
						self._remotepkgs.pop(cpv, None)
				continue
			try:
				chunk_size = long(self.settings["PORTAGE_BINHOST_CHUNKSIZE"])
				if chunk_size < 8:
					chunk_size = 8
			except (ValueError, KeyError):
				chunk_size = 3000
			writemsg_stdout("\n")
			writemsg_stdout(
				colorize("GOOD", _("Fetching bininfo from ")) + \
				_hide_url_passwd(base_url) + "\n")
			remotepkgs = portage.getbinpkg.dir_get_metadata(
				base_url, chunk_size=chunk_size)

			for mypkg, remote_metadata in remotepkgs.items():
				mycat = remote_metadata.get("CATEGORY")
				if mycat is None:
					#old-style or corrupt package
					writemsg(_("!!! Invalid remote binary package: %s\n") % mypkg,
						noiselevel=-1)
					continue
				mycat = mycat.strip()
				try:
					fullpkg = _pkg_str(mycat+"/"+mypkg[:-5])
				except InvalidData:
					writemsg(_("!!! Invalid remote binary package: %s\n") % mypkg,
						noiselevel=-1)
					continue

				if fullpkg in metadata:
					# When using this old protocol, comparison with the remote
					# package isn't supported, so the local package is always
					# preferred even if getbinpkgsonly is enabled.
					continue

				if not self.dbapi._category_re.match(mycat):
					writemsg(_("!!! Remote binary package has an " \
						"unrecognized category: '%s'\n") % fullpkg,
						noiselevel=-1)
					writemsg(_("!!! '%s' has a category that is not" \
						" listed in %setc/portage/categories\n") % \
						(fullpkg, self.settings["PORTAGE_CONFIGROOT"]),
						noiselevel=-1)
					continue
				mykey = portage.cpv_getkey(fullpkg)
				try:
					# invalid tbz2's can hurt things.
					self.dbapi.cpv_inject(fullpkg)
					for k, v in remote_metadata.items():
						remote_metadata[k] = v.strip()
					remote_metadata["BASE_URI"] = base_url

					# Eliminate metadata values with names that digestCheck
					# uses, since they are not valid when using the old
					# protocol. Typically this is needed for SIZE metadata
					# which corresponds to the size of the unpacked files
					# rather than the binpkg file size, triggering digest
					# verification failures as reported in bug #303211.
					remote_metadata.pop('SIZE', None)
					for k in portage.checksum.hashfunc_map:
						remote_metadata.pop(k, None)

					self._remotepkgs[fullpkg] = remote_metadata
				except SystemExit as e:
					raise
				except:
					writemsg(_("!!! Failed to inject remote binary package: %s\n") % fullpkg,
						noiselevel=-1)
					continue
		self.populated=1
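
Near the end of this variant, a remote entry is dropped when it is identical to the local package, judged by comparing SIZE plus every hash both sides carry. A standalone sketch of that comparison follows; packages_identical and the hash_names default are illustrative (portage derives the list from self._pkgindex_hashes).

def packages_identical(local, remote, hash_names=("SIZE", "MD5", "SHA1")):
    # A digest participates only when both entries define it; any shared
    # digest that differs marks the packages as distinct builds.
    for name in hash_names:
        local_value = local.get(name)
        remote_value = remote.get(name)
        if local_value is None or remote_value is None:
            continue
        if local_value != remote_value:
            return False
    return True
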
Example #7
def elog_process(cpv, mysettings, phasefilter=None):
	global _elog_atexit_handlers
	
	logsystems = mysettings.get("PORTAGE_ELOG_SYSTEM", "").split()
	for s in logsystems:
		# allow per module overrides of PORTAGE_ELOG_CLASSES
		if ":" in s:
			s, levels = s.split(":", 1)
			levels = levels.split(",")
		# - is nicer than _ for module names, so allow people to use it.
		s = s.replace("-", "_")
		try:
			_load_mod("portage.elog.mod_" + s)
		except ImportError:
			pass

	if "T" in mysettings:
		ebuild_logentries = collect_ebuild_messages(
			os.path.join(mysettings["T"], "logging"))
	else:
		# A build dir isn't necessarily required since the messages.e*
		# functions allow messages to be generated in-memory.
		ebuild_logentries = {}
	all_logentries = collect_messages(key=cpv, phasefilter=phasefilter)
	if cpv in all_logentries:
		# Messages generated by the python elog implementation are assumed
		# to come first. For example, this ensures correct order for einfo
		# messages that are generated prior to the setup phase.
		all_logentries[cpv] = \
			_merge_logentries(all_logentries[cpv], ebuild_logentries)
	else:
		all_logentries[cpv] = ebuild_logentries

	my_elog_classes = set(mysettings.get("PORTAGE_ELOG_CLASSES", "").split())
	logsystems = {}
	for token in mysettings.get("PORTAGE_ELOG_SYSTEM", "").split():
		if ":" in token:
			s, levels = token.split(":", 1)
			levels = levels.split(",")
		else:
			s = token
			levels = ()
		levels_set = logsystems.get(s)
		if levels_set is None:
			levels_set = set()
			logsystems[s] = levels_set
		levels_set.update(levels)

	for key in all_logentries:
		default_logentries = filter_loglevels(all_logentries[key], my_elog_classes)

		# in case the filters matched all messages and no module overrides exist
		if len(default_logentries) == 0 and ":" not in mysettings.get("PORTAGE_ELOG_SYSTEM", ""):
			continue

		default_fulllog = _combine_logentries(default_logentries)

		# call listeners
		for listener in _elog_listeners:
			listener(mysettings, str(key), default_logentries, default_fulllog)

		# pass the processing to the individual modules
		for s, levels in logsystems.items():
			# allow per module overrides of PORTAGE_ELOG_CLASSES
			if levels:
				mod_logentries = filter_loglevels(all_logentries[key], levels)
				mod_fulllog = _combine_logentries(mod_logentries)
			else:
				mod_logentries = default_logentries
				mod_fulllog = default_fulllog
			if len(mod_logentries) == 0:
				continue
			# - is nicer than _ for module names, so allow people to use it.
			s = s.replace("-", "_")
			try:
				m = _load_mod("portage.elog.mod_" + s)
				# Timeout after one minute (in case something like the mail
				# module gets hung).
				try:
					AlarmSignal.register(60)
					m.process(mysettings, str(key), mod_logentries, mod_fulllog)
				finally:
					AlarmSignal.unregister()
			if hasattr(m, "finalize") and m.finalize not in _elog_atexit_handlers:
					_elog_atexit_handlers.append(m.finalize)
					atexit_register(m.finalize)
			except (ImportError, AttributeError) as e:
				writemsg(_("!!! Error while importing logging modules "
					"while loading \"mod_%s\":\n") % str(s))
				writemsg("%s\n" % str(e), noiselevel=-1)
			except AlarmSignal:
				writemsg("Timeout in elog_process for system '%s'\n" % s,
					noiselevel=-1)
			except PortageException as e:
				writemsg("%s\n" % str(e), noiselevel=-1)
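
Modules that define finalize() are registered exactly once as exit hooks, which is how the _finalize() of mod_mail_summary shown in examples #1 and #2 eventually runs. A standalone sketch of that registration follows; register_finalizer is a hypothetical name, and the standard atexit module stands in for portage's own atexit_register wrapper.

import atexit

_elog_atexit_handlers = []

def register_finalizer(mod):
    # Append-and-register only on first sight so each finalize() runs once.
    finalize = getattr(mod, "finalize", None)
    if finalize is not None and finalize not in _elog_atexit_handlers:
        _elog_atexit_handlers.append(finalize)
        atexit.register(finalize)
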
Example #8
	def _populate(self, getbinpkgs=0):
		if (not os.path.isdir(self.pkgdir) and not getbinpkgs):
			return 0

		# Clear all caches in case populate is called multiple times
		# as may be the case when _global_updates calls populate()
		# prior to performing package moves since it only wants to
		# operate on local packages (getbinpkgs=0).
		self._remotepkgs = None
		self.dbapi._clear_cache()
		self.dbapi._aux_cache.clear()
		if True:
			pkg_paths = {}
			self._pkg_paths = pkg_paths
			dirs = listdir(self.pkgdir, dirsonly=True, EmptyOnError=True)
			if "All" in dirs:
				dirs.remove("All")
			dirs.sort()
			dirs.insert(0, "All")
			pkgindex = self._load_pkgindex()
			pf_index = None
			if not self._pkgindex_version_supported(pkgindex):
				pkgindex = self._new_pkgindex()
			header = pkgindex.header
			metadata = {}
			for d in pkgindex.packages:
				metadata[d["CPV"]] = d
			update_pkgindex = False
			for mydir in dirs:
				for myfile in listdir(os.path.join(self.pkgdir, mydir)):
					if not myfile.endswith(".tbz2"):
						continue
					mypath = os.path.join(mydir, myfile)
					full_path = os.path.join(self.pkgdir, mypath)
					s = os.lstat(full_path)
					if stat.S_ISLNK(s.st_mode):
						continue

					# Validate data from the package index and try to avoid
					# reading the xpak if possible.
					if mydir != "All":
						possibilities = None
						d = metadata.get(mydir+"/"+myfile[:-5])
						if d:
							possibilities = [d]
					else:
						if pf_index is None:
							pf_index = {}
							for mycpv in metadata:
								mycat, mypf = catsplit(mycpv)
								pf_index.setdefault(
									mypf, []).append(metadata[mycpv])
						possibilities = pf_index.get(myfile[:-5])
					if possibilities:
						match = None
						for d in possibilities:
							try:
								if long(d["MTIME"]) != s[stat.ST_MTIME]:
									continue
							except (KeyError, ValueError):
								continue
							try:
								if long(d["SIZE"]) != long(s.st_size):
									continue
							except (KeyError, ValueError):
								continue
							if not self._pkgindex_keys.difference(d):
								match = d
								break
						if match:
							mycpv = match["CPV"]
							if mycpv in pkg_paths:
								# discard duplicates (All/ is preferred)
								continue
							mycpv = _pkg_str(mycpv)
							pkg_paths[mycpv] = mypath
							# update the path if the package has been moved
							oldpath = d.get("PATH")
							if oldpath and oldpath != mypath:
								update_pkgindex = True
							if mypath != mycpv + ".tbz2":
								d["PATH"] = mypath
								if not oldpath:
									update_pkgindex = True
							else:
								d.pop("PATH", None)
								if oldpath:
									update_pkgindex = True
							self.dbapi.cpv_inject(mycpv)
							if not self.dbapi._aux_cache_keys.difference(d):
								aux_cache = self.dbapi._aux_cache_slot_dict()
								for k in self.dbapi._aux_cache_keys:
									aux_cache[k] = d[k]
								self.dbapi._aux_cache[mycpv] = aux_cache
							continue
					if not os.access(full_path, os.R_OK):
						writemsg(_("!!! Permission denied to read " \
							"binary package: '%s'\n") % full_path,
							noiselevel=-1)
						self.invalids.append(myfile[:-5])
						continue
					metadata_bytes = portage.xpak.tbz2(full_path).get_data()
					mycat = _unicode_decode(metadata_bytes.get(b"CATEGORY", ""),
						encoding=_encodings['repo.content'], errors='replace')
					mypf = _unicode_decode(metadata_bytes.get(b"PF", ""),
						encoding=_encodings['repo.content'], errors='replace')
					slot = _unicode_decode(metadata_bytes.get(b"SLOT", ""),
						encoding=_encodings['repo.content'], errors='replace')
					mypkg = myfile[:-5]
					if not mycat or not mypf or not slot:
						#old-style or corrupt package
						writemsg(_("\n!!! Invalid binary package: '%s'\n") % full_path,
							noiselevel=-1)
						missing_keys = []
						if not mycat:
							missing_keys.append("CATEGORY")
						if not mypf:
							missing_keys.append("PF")
						if not slot:
							missing_keys.append("SLOT")
						msg = []
						if missing_keys:
							missing_keys.sort()
							msg.append(_("Missing metadata key(s): %s.") % \
								", ".join(missing_keys))
						msg.append(_(" This binary package is not " \
							"recoverable and should be deleted."))
						for line in textwrap.wrap("".join(msg), 72):
							writemsg("!!! %s\n" % line, noiselevel=-1)
						self.invalids.append(mypkg)
						continue
					mycat = mycat.strip()
					slot = slot.strip()
					if mycat != mydir and mydir != "All":
						continue
					if mypkg != mypf.strip():
						continue
					mycpv = mycat + "/" + mypkg
					if mycpv in pkg_paths:
						# All is first, so it's preferred.
						continue
					if not self.dbapi._category_re.match(mycat):
						writemsg(_("!!! Binary package has an " \
							"unrecognized category: '%s'\n") % full_path,
							noiselevel=-1)
						writemsg(_("!!! '%s' has a category that is not" \
							" listed in %setc/portage/categories\n") % \
							(mycpv, self.settings["PORTAGE_CONFIGROOT"]),
							noiselevel=-1)
						continue
					mycpv = _pkg_str(mycpv)
					pkg_paths[mycpv] = mypath
					self.dbapi.cpv_inject(mycpv)
					update_pkgindex = True
					d = metadata.get(mycpv, {})
					if d:
						try:
							if long(d["MTIME"]) != s[stat.ST_MTIME]:
								d.clear()
						except (KeyError, ValueError):
							d.clear()
					if d:
						try:
							if long(d["SIZE"]) != long(s.st_size):
								d.clear()
						except (KeyError, ValueError):
							d.clear()

					d["CPV"] = mycpv
					d["SLOT"] = slot
					d["MTIME"] = str(s[stat.ST_MTIME])
					d["SIZE"] = str(s.st_size)

					d.update(zip(self._pkgindex_aux_keys,
						self.dbapi.aux_get(mycpv, self._pkgindex_aux_keys)))
					try:
						self._eval_use_flags(mycpv, d)
					except portage.exception.InvalidDependString:
						writemsg(_("!!! Invalid binary package: '%s'\n") % \
							self.getname(mycpv), noiselevel=-1)
						self.dbapi.cpv_remove(mycpv)
						del pkg_paths[mycpv]

					# record location if it's non-default
					if mypath != mycpv + ".tbz2":
						d["PATH"] = mypath
					else:
						d.pop("PATH", None)
					metadata[mycpv] = d
					if not self.dbapi._aux_cache_keys.difference(d):
						aux_cache = self.dbapi._aux_cache_slot_dict()
						for k in self.dbapi._aux_cache_keys:
							aux_cache[k] = d[k]
						self.dbapi._aux_cache[mycpv] = aux_cache

			for cpv in list(metadata):
				if cpv not in pkg_paths:
					del metadata[cpv]

			# Do not bother to write the Packages index if $PKGDIR/All/ exists
			# since it will provide no benefit due to the need to read CATEGORY
			# from xpak.
			if update_pkgindex and os.access(self.pkgdir, os.W_OK):
				del pkgindex.packages[:]
				pkgindex.packages.extend(iter(metadata.values()))
				self._update_pkgindex_header(pkgindex.header)
				self._pkgindex_write(pkgindex)

		if getbinpkgs and not self.settings["PORTAGE_BINHOST"]:
			writemsg(_("!!! PORTAGE_BINHOST unset, but use is requested.\n"),
				noiselevel=-1)

		if not getbinpkgs or 'PORTAGE_BINHOST' not in self.settings:
			self.populated = 1
			return
		self._remotepkgs = {}
		for base_url in self.settings["PORTAGE_BINHOST"].split():
			parsed_url = urlparse(base_url)
			host = parsed_url.netloc
			port = parsed_url.port
			user = None
			passwd = None
			user_passwd = ""
			if "@" in host:
				user, host = host.split("@", 1)
				user_passwd = user + "@"
				if ":" in user:
					user, passwd = user.split(":", 1)
			port_args = []
			if port is not None:
				port_str = ":%s" % (port,)
				if host.endswith(port_str):
					host = host[:-len(port_str)]
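			# Location of the locally cached copy of this binhost's
			# Packages index.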
			pkgindex_file = os.path.join(self.settings["EROOT"], CACHE_PATH, "binhost",
				host, parsed_url.path.lstrip("/"), "Packages")
			pkgindex = self._new_pkgindex()
			try:
				f = io.open(_unicode_encode(pkgindex_file,
					encoding=_encodings['fs'], errors='strict'),
					mode='r', encoding=_encodings['repo.content'],
					errors='replace')
				try:
					pkgindex.read(f)
				finally:
					f.close()
			except EnvironmentError as e:
				if e.errno != errno.ENOENT:
					raise
			local_timestamp = pkgindex.header.get("TIMESTAMP", None)
			remote_timestamp = None
			rmt_idx = self._new_pkgindex()
			proc = None
			tmp_filename = None
			try:
				# urlparse.urljoin() only works correctly with recognized
				# protocols and requires the base url to have a trailing
				# slash, so join manually...
				url = base_url.rstrip("/") + "/Packages"
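				# Pass the cached TIMESTAMP as If-Modified-Since so
				# the server can answer 304 when the local copy is
				# still current.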
				try:
					f = _urlopen(url, if_modified_since=local_timestamp)
					if hasattr(f, 'headers') and f.headers.get('timestamp', ''):
						remote_timestamp = f.headers.get('timestamp')
				except IOError as err:
					if hasattr(err, 'code') and err.code == 304: # not modified (since local_timestamp)
						raise UseCachedCopyOfRemoteIndex()

					path = parsed_url.path.rstrip("/") + "/Packages"

					if parsed_url.scheme == 'sftp':
						# The sftp command complains about 'Illegal seek' if
						# we try to make it write to /dev/stdout, so use a
						# temp file instead.
						fd, tmp_filename = tempfile.mkstemp()
						os.close(fd)
						if port is not None:
							port_args = ['-P', "%s" % (port,)]
						proc = subprocess.Popen(['sftp'] + port_args + \
							[user_passwd + host + ":" + path, tmp_filename])
						if proc.wait() != os.EX_OK:
							raise
						f = open(tmp_filename, 'rb')
					elif parsed_url.scheme == 'ssh':
						if port is not None:
							port_args = ['-p', "%s" % (port,)]
						proc = subprocess.Popen(['ssh'] + port_args + \
							[user_passwd + host, '--', 'cat', path],
							stdout=subprocess.PIPE)
						f = proc.stdout
					else:
						setting = 'FETCHCOMMAND_' + parsed_url.scheme.upper()
						fcmd = self.settings.get(setting)
						if not fcmd:
							raise
						fd, tmp_filename = tempfile.mkstemp()
						tmp_dirname, tmp_basename = os.path.split(tmp_filename)
						os.close(fd)
						success = portage.getbinpkg.file_get(url,
							tmp_dirname, fcmd=fcmd, filename=tmp_basename)
						if not success:
							raise EnvironmentError("%s failed" % (setting,))
						f = open(tmp_filename, 'rb')

				f_dec = codecs.iterdecode(f,
					_encodings['repo.content'], errors='replace')
				try:
					rmt_idx.readHeader(f_dec)
					if not remote_timestamp: # in case it had not been read from HTTP header
						remote_timestamp = rmt_idx.header.get("TIMESTAMP", None)
					if not remote_timestamp:
						# no timestamp in the header, something's wrong
						pkgindex = None
						writemsg(_("\n\n!!! Binhost package index"
							" has no TIMESTAMP field.\n"), noiselevel=-1)
					else:
						if not self._pkgindex_version_supported(rmt_idx):
							writemsg(_("\n\n!!! Binhost package index version" \
							" is not supported: '%s'\n") % \
							rmt_idx.header.get("VERSION"), noiselevel=-1)
							pkgindex = None
						elif local_timestamp != remote_timestamp:
							rmt_idx.readBody(f_dec)
							pkgindex = rmt_idx
				finally:
					# Timeout after 5 seconds, in case close() blocks
					# indefinitely (see bug #350139).
					try:
						try:
							AlarmSignal.register(5)
							f.close()
						finally:
							AlarmSignal.unregister()
					except AlarmSignal:
						writemsg("\n\n!!! %s\n" % \
							_("Timed out while closing connection to binhost"),
							noiselevel=-1)
			except UseCachedCopyOfRemoteIndex:
				writemsg_stdout("\n")
				writemsg_stdout(
					colorize("GOOD", _("Local copy of remote index is up-to-date and will be used.")) + \
					"\n")
				rmt_idx = pkgindex
			except EnvironmentError as e:
				writemsg(_("\n\n!!! Error fetching binhost package" \
					" info from '%s'\n") % _hide_url_passwd(base_url))
				writemsg("!!! %s\n\n" % str(e))
				del e
				pkgindex = None
			if proc is not None:
				if proc.poll() is None:
					proc.kill()
					proc.wait()
				proc = None
			if tmp_filename is not None:
				try:
					os.unlink(tmp_filename)
				except OSError:
					pass
			if pkgindex is rmt_idx:
				pkgindex.modified = False # don't update the header
				try:
					ensure_dirs(os.path.dirname(pkgindex_file))
					f = atomic_ofstream(pkgindex_file)
					pkgindex.write(f)
					f.close()
				except (IOError, PortageException):
					if os.access(os.path.dirname(pkgindex_file), os.W_OK):
						raise
					# The current user doesn't have permission to cache the
					# file, but that's alright.
			if pkgindex:
				# Organize remote package list as a cpv -> metadata map.
				remotepkgs = _pkgindex_cpv_map_latest_build(pkgindex)
				remote_base_uri = pkgindex.header.get("URI", base_url)
				for cpv, remote_metadata in remotepkgs.items():
					remote_metadata["BASE_URI"] = remote_base_uri
					self._pkgindex_uri[cpv] = url
				self._remotepkgs.update(remotepkgs)
				self._remote_has_index = True
				for cpv in remotepkgs:
					self.dbapi.cpv_inject(cpv)
				# Remote package instances override local package
				# instances if they are not identical.
				hash_names = ["SIZE"] + self._pkgindex_hashes
				for cpv, local_metadata in metadata.items():
					remote_metadata = self._remotepkgs.get(cpv)
					if remote_metadata is None:
						continue
					# Use digests to compare identity.
					identical = True
					for hash_name in hash_names:
						local_value = local_metadata.get(hash_name)
						if local_value is None:
							continue
						remote_value = remote_metadata.get(hash_name)
						if remote_value is None:
							continue
						if local_value != remote_value:
							identical = False
							break
					if identical:
						# Identical to the local copy, so there is no
						# need to keep the remote instance around.
						del self._remotepkgs[cpv]
					else:
						# Override the local package in the aux_get cache.
						self.dbapi._aux_cache[cpv] = remote_metadata

		self.populated = 1
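
The final loop in this example drops remote entries that are digest-identical to a local binary package, comparing SIZE plus whatever digest fields both sides carry. A minimal standalone sketch of that identity test follows; the hash list and the metadata dicts are illustrative stand-ins, not Portage API:

def is_identical(local_metadata, remote_metadata,
		hash_names=("SIZE", "MD5", "SHA1")):
	# Entries are identical when every field present on BOTH sides
	# matches; fields missing on either side are skipped.
	for hash_name in hash_names:
		local_value = local_metadata.get(hash_name)
		remote_value = remote_metadata.get(hash_name)
		if local_value is None or remote_value is None:
			continue
		if local_value != remote_value:
			return False
	return True

# SIZE matches but MD5 differs, so the remote instance wins:
print(is_identical({"SIZE": "1024", "MD5": "aa"},
	{"SIZE": "1024", "MD5": "bb"}))  # -> False
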
Example #9
	def _populate_remote(self, getbinpkg_refresh=True):

		self._remote_has_index = False
		self._remotepkgs = {}
		for base_url in self.settings["PORTAGE_BINHOST"].split():
			parsed_url = urlparse(base_url)
			host = parsed_url.netloc
			port = parsed_url.port
			user = None
			passwd = None
			user_passwd = ""
			if "@" in host:
				user, host = host.split("@", 1)
				user_passwd = user + "@"
				if ":" in user:
					user, passwd = user.split(":", 1)

			if port is not None:
				port_str = ":%s" % (port,)
				if host.endswith(port_str):
					host = host[:-len(port_str)]
			pkgindex_file = os.path.join(self.settings["EROOT"], CACHE_PATH, "binhost",
				host, parsed_url.path.lstrip("/"), "Packages")
			pkgindex = self._new_pkgindex()
			try:
				f = io.open(_unicode_encode(pkgindex_file,
					encoding=_encodings['fs'], errors='strict'),
					mode='r', encoding=_encodings['repo.content'],
					errors='replace')
				try:
					pkgindex.read(f)
				finally:
					f.close()
			except EnvironmentError as e:
				if e.errno != errno.ENOENT:
					raise
			local_timestamp = pkgindex.header.get("TIMESTAMP", None)
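			# DOWNLOAD_TIMESTAMP records when this cached index was
			# last fetched; together with the TTL header it decides
			# whether a refetch is needed at all.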
			try:
				download_timestamp = \
					float(pkgindex.header.get("DOWNLOAD_TIMESTAMP", 0))
			except ValueError:
				download_timestamp = 0
			remote_timestamp = None
			rmt_idx = self._new_pkgindex()
			proc = None
			tmp_filename = None
			try:
				# urlparse.urljoin() only works correctly with recognized
				# protocols and requires the base url to have a trailing
				# slash, so join manually...
				url = base_url.rstrip("/") + "/Packages"
				f = None

				if not getbinpkg_refresh and local_timestamp:
					raise UseCachedCopyOfRemoteIndex()

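				# Honor the TTL advertised by the binhost: while the
				# cached index is still fresh, skip the network fetch.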
				try:
					ttl = float(pkgindex.header.get("TTL", 0))
				except ValueError:
					pass
				else:
					if download_timestamp and ttl and \
						download_timestamp + ttl > time.time():
						raise UseCachedCopyOfRemoteIndex()

				# Don't use urlopen for https, unless
				# PEP 476 is supported (bug #469888).
				if parsed_url.scheme not in ('https',) or _have_pep_476():
					try:
						f = _urlopen(url, if_modified_since=local_timestamp)
						if hasattr(f, 'headers') and f.headers.get('timestamp', ''):
							remote_timestamp = f.headers.get('timestamp')
					except IOError as err:
						if hasattr(err, 'code') and err.code == 304: # not modified (since local_timestamp)
							raise UseCachedCopyOfRemoteIndex()

						if parsed_url.scheme in ('ftp', 'http', 'https'):
							# This protocol is supposedly supported by urlopen,
							# so apparently there's a problem with the url
							# or a bug in urlopen.
							if self.settings.get("PORTAGE_DEBUG", "0") != "0":
								traceback.print_exc()

							raise
					except ValueError:
						raise ParseError("Invalid Portage BINHOST value '%s'"
							% url.lstrip())

				if f is None:

					path = parsed_url.path.rstrip("/") + "/Packages"

					if parsed_url.scheme == 'ssh':
						# Use a pipe so that we can terminate the download
						# early if we detect that the TIMESTAMP header
						# matches that of the cached Packages file.
						ssh_args = ['ssh']
						if port is not None:
							ssh_args.append("-p%s" % (port,))
						# NOTE: shlex evaluates embedded quotes
						ssh_args.extend(portage.util.shlex_split(
							self.settings.get("PORTAGE_SSH_OPTS", "")))
						ssh_args.append(user_passwd + host)
						ssh_args.append('--')
						ssh_args.append('cat')
						ssh_args.append(path)

						proc = subprocess.Popen(ssh_args,
							stdout=subprocess.PIPE)
						f = proc.stdout
					else:
						setting = 'FETCHCOMMAND_' + parsed_url.scheme.upper()
						fcmd = self.settings.get(setting)
						if not fcmd:
							fcmd = self.settings.get('FETCHCOMMAND')
							if not fcmd:
								raise EnvironmentError("FETCHCOMMAND is unset")

						fd, tmp_filename = tempfile.mkstemp()
						tmp_dirname, tmp_basename = os.path.split(tmp_filename)
						os.close(fd)

						fcmd_vars = {
							"DISTDIR": tmp_dirname,
							"FILE": tmp_basename,
							"URI": url
						}

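						# Forward PORTAGE_SSH_OPTS so that ssh-based
						# FETCHCOMMAND templates can substitute it.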
						for k in ("PORTAGE_SSH_OPTS",):
							v = self.settings.get(k)
							if v is not None:
								fcmd_vars[k] = v

						success = portage.getbinpkg.file_get(
							fcmd=fcmd, fcmd_vars=fcmd_vars)
						if not success:
							raise EnvironmentError("%s failed" % (setting,))
						f = open(tmp_filename, 'rb')

				f_dec = codecs.iterdecode(f,
					_encodings['repo.content'], errors='replace')
				try:
					rmt_idx.readHeader(f_dec)
					if not remote_timestamp: # in case it had not been read from HTTP header
						remote_timestamp = rmt_idx.header.get("TIMESTAMP", None)
					if not remote_timestamp:
						# no timestamp in the header, something's wrong
						pkgindex = None
						writemsg(_("\n\n!!! Binhost package index"
							" has no TIMESTAMP field.\n"), noiselevel=-1)
					else:
						if not self._pkgindex_version_supported(rmt_idx):
							writemsg(_("\n\n!!! Binhost package index version" \
							" is not supported: '%s'\n") % \
							rmt_idx.header.get("VERSION"), noiselevel=-1)
							pkgindex = None
						elif local_timestamp != remote_timestamp:
							rmt_idx.readBody(f_dec)
							pkgindex = rmt_idx
				finally:
					# Timeout after 5 seconds, in case close() blocks
					# indefinitely (see bug #350139).
					try:
						try:
							AlarmSignal.register(5)
							f.close()
						finally:
							AlarmSignal.unregister()
					except AlarmSignal:
						writemsg("\n\n!!! %s\n" % \
							_("Timed out while closing connection to binhost"),
							noiselevel=-1)
			except UseCachedCopyOfRemoteIndex:
				writemsg_stdout("\n")
				writemsg_stdout(
					colorize("GOOD", _("Local copy of remote index is up-to-date and will be used.")) + \
					"\n")
				rmt_idx = pkgindex
			except EnvironmentError as e:
				# This includes URLError which is raised for SSL
				# certificate errors when PEP 476 is supported.
				writemsg(_("\n\n!!! Error fetching binhost package" \
					" info from '%s'\n") % _hide_url_passwd(base_url))
				# With Python 2, the EnvironmentError message may
				# contain bytes or unicode, so use _unicode to ensure
				# safety with all locales (bug #532784).
				try:
					error_msg = _unicode(e)
				except UnicodeDecodeError as uerror:
					error_msg = _unicode(uerror.object,
						encoding='utf_8', errors='replace')
				writemsg("!!! %s\n\n" % error_msg)
				del e
				pkgindex = None
			if proc is not None:
				if proc.poll() is None:
					proc.kill()
					proc.wait()
				proc = None
			if tmp_filename is not None:
				try:
					os.unlink(tmp_filename)
				except OSError:
					pass
			if pkgindex is rmt_idx:
				pkgindex.modified = False # don't update the header
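				# Stamp the cache with the fetch time so the TTL check
				# above can short-circuit the next refresh.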
				pkgindex.header["DOWNLOAD_TIMESTAMP"] = "%d" % time.time()
				try:
					ensure_dirs(os.path.dirname(pkgindex_file))
					f = atomic_ofstream(pkgindex_file)
					pkgindex.write(f)
					f.close()
				except (IOError, PortageException):
					if os.access(os.path.dirname(pkgindex_file), os.W_OK):
						raise
					# The current user doesn't have permission to cache the
					# file, but that's alright.
			if pkgindex:
				remote_base_uri = pkgindex.header.get("URI", base_url)
				for d in pkgindex.packages:
					cpv = _pkg_str(d["CPV"], metadata=d,
						settings=self.settings)
					# Local package instances override remote instances
					# with the same instance_key.
					if self.dbapi.cpv_exists(cpv):
						continue

					d["CPV"] = cpv
					d["BASE_URI"] = remote_base_uri
					d["PKGINDEX_URI"] = url
					self._remotepkgs[self.dbapi._instance_key(cpv)] = d
					self.dbapi.cpv_inject(cpv)

				self._remote_has_index = True
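
Compared with the older _populate above, one notable addition in Example #9 is the cache-freshness logic: the remote index is refetched only after DOWNLOAD_TIMESTAMP plus the binhost's advertised TTL has elapsed. A minimal sketch of that decision, assuming a plain dict stands in for pkgindex.header:

import time

def cached_index_is_fresh(header, now=None):
	# header is a plain dict standing in for pkgindex.header.
	now = time.time() if now is None else now
	try:
		download_timestamp = float(header.get("DOWNLOAD_TIMESTAMP", 0))
		ttl = float(header.get("TTL", 0))
	except ValueError:
		# Unparseable header values: treat the cache as stale.
		return False
	return bool(download_timestamp and ttl
		and download_timestamp + ttl > now)

# Fetched 100 seconds ago with a 300-second TTL: still fresh.
print(cached_index_is_fresh(
	{"DOWNLOAD_TIMESTAMP": str(time.time() - 100), "TTL": "300"}))  # True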