Code example #1
	def rebuild(self, exclude_pkgs=None, include_file=None,
		preserve_paths=None):
		"""
		Raises CommandNotFound if there are preserved libs
		and the scanelf binary is not available.

		@param exclude_pkgs: A set of packages that should be excluded from
			the LinkageMap, since they are being unmerged and their NEEDED
			entries are therefore irrelevant and would only serve to corrupt
			the LinkageMap.
		@type exclude_pkgs: set
		@param include_file: The path of a file containing NEEDED entries for
			a package which does not exist in the vardbapi yet because it is
			currently being merged.
		@type include_file: String
		@param preserve_paths: Libraries preserved by a package instance that
			is currently being merged. They need to be explicitly passed to the
			LinkageMap, since they are not registered in the
			PreservedLibsRegistry yet.
		@type preserve_paths: set
		"""

		os = _os_merge
		root = self._root
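		# root ends with os.sep, so slicing with root_len strips the
		# root prefix while preserving the leading separator
		# (see entry.filename[root_len:] below).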
		root_len = len(root) - 1
		self._clear_cache()
		self._defpath.update(getlibpaths(self._dbapi.settings['EROOT'],
			env=self._dbapi.settings))
		libs = self._libs
		obj_properties = self._obj_properties

		lines = []

		# Data from include_file is processed first so that it
		# overrides any data from previously installed files.
		if include_file is not None:
			for line in grabfile(include_file):
				lines.append((None, include_file, line))

		aux_keys = [self._needed_aux_key]
		can_lock = os.access(os.path.dirname(self._dbapi._dbroot), os.W_OK)
		if can_lock:
			self._dbapi.lock()
		try:
			for cpv in self._dbapi.cpv_all():
				if exclude_pkgs is not None and cpv in exclude_pkgs:
					continue
				needed_file = self._dbapi.getpath(cpv,
					filename=self._needed_aux_key)
				for line in self._dbapi.aux_get(cpv, aux_keys)[0].splitlines():
					lines.append((cpv, needed_file, line))
		finally:
			if can_lock:
				self._dbapi.unlock()

		# Have to call scanelf for preserved libs here, as they aren't
		# registered in NEEDED.ELF.2 files.
		plibs = {}
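		# plibs maps each preserved-library path to its owning cpv
		# (None for paths passed via preserve_paths, whose owner is
		# the package instance currently being merged).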
		if preserve_paths is not None:
			plibs.update((x, None) for x in preserve_paths)
		if self._dbapi._plib_registry and \
			self._dbapi._plib_registry.hasEntries():
			for cpv, items in \
				self._dbapi._plib_registry.getPreservedLibs().items():
				if exclude_pkgs is not None and cpv in exclude_pkgs:
					# These preserved libs will either be unmerged,
					# rendering them irrelevant, or they will be
					# preserved in the replacement package and are
					# already represented via the preserve_paths
					# parameter.
					continue
				plibs.update((x, cpv) for x in items)
		if plibs:
			args = [os.path.join(EPREFIX or "/", "usr/bin/scanelf"), "-qF", "%a;%F;%S;%r;%n"]
			args.extend(os.path.join(root, x.lstrip("." + os.sep)) \
				for x in plibs)
			try:
				proc = subprocess.Popen(args, stdout=subprocess.PIPE)
			except EnvironmentError as e:
				if e.errno != errno.ENOENT:
					raise
				raise CommandNotFound(args[0])
			else:
				for l in proc.stdout:
					try:
						l = _unicode_decode(l,
							encoding=_encodings['content'], errors='strict')
					except UnicodeDecodeError:
						l = _unicode_decode(l,
							encoding=_encodings['content'], errors='replace')
						writemsg_level(_("\nError decoding characters " \
							"returned from scanelf: %s\n\n") % (l,),
							level=logging.ERROR, noiselevel=-1)
					l = l[3:].rstrip("\n")
					if not l:
						continue
					try:
						entry = NeededEntry.parse("scanelf", l)
					except InvalidData as e:
						writemsg_level("\n%s\n\n" % (e,),
							level=logging.ERROR, noiselevel=-1)
						continue
					try:
						with open(_unicode_encode(entry.filename,
							encoding=_encodings['fs'],
							errors='strict'), 'rb') as f:
							elf_header = ELFHeader.read(f)
					except EnvironmentError as e:
						if e.errno != errno.ENOENT:
							raise
						# File removed concurrently.
						continue
					entry.multilib_category = compute_multilib_category(elf_header)
					entry.filename = entry.filename[root_len:]
					owner = plibs.pop(entry.filename, None)
					lines.append((owner, "scanelf", _unicode(entry)))
				proc.wait()
				proc.stdout.close()

		if plibs:
			# Preserved libraries that did not appear in the scanelf output.
			# This is known to happen with statically linked libraries.
			# Generate dummy lines for these, so we can assume that every
			# preserved library has an entry in self._obj_properties. This
			# is important in order to prevent findConsumers from raising
			# an unwanted KeyError.
			for x, cpv in plibs.items():
				lines.append((cpv, "plibs", ";".join(['', x, '', '', ''])))

		# Share identical frozenset instances when available,
		# in order to conserve memory.
		frozensets = {}

		for owner, location, l in lines:
			l = l.rstrip("\n")
			if not l:
				continue
			if '\0' in l:
				# os.stat() will raise "TypeError: must be encoded string
				# without NULL bytes, not str" in this case.
				writemsg_level(_("\nLine contains null byte(s) " \
					"in %s: %s\n\n") % (location, l),
					level=logging.ERROR, noiselevel=-1)
				continue
			try:
				entry = NeededEntry.parse(location, l)
			except InvalidData as e:
				writemsg_level("\n%s\n\n" % (e,),
					level=logging.ERROR, noiselevel=-1)
				continue

			# If NEEDED.ELF.2 contains the new multilib category field,
			# then use that for categorization. Otherwise, if a mapping
			# exists, map e_machine (entry.arch) to an approximate
			# multilib category. If all else fails, use e_machine, just
			# as older versions of portage did.
			arch = entry.multilib_category
			if arch is None:
				arch = _approx_multilib_categories.get(
					entry.arch, entry.arch)

			obj = entry.filename
			soname = entry.soname
			expand = {"ORIGIN": os.path.dirname(entry.filename)}
			path = frozenset(normalize_path(
				varexpand(x, expand, error_leader=lambda: "%s: " % location))
				for x in entry.runpaths)
			path = frozensets.setdefault(path, path)
			needed = frozenset(entry.needed)

			needed = frozensets.setdefault(needed, needed)

			obj_key = self._obj_key(obj)
			indexed = True
			myprops = obj_properties.get(obj_key)
			if myprops is None:
				indexed = False
				myprops = self._obj_properties_class(
					arch, needed, path, soname, [], owner)
				obj_properties[obj_key] = myprops
			# All object paths are added into the obj_properties tuple.
			myprops.alt_paths.append(obj)

			# Don't index the same file more than once since only one
			# set of data can be correct and therefore mixing data
			# may corrupt the index (include_file overrides previously
			# installed).
			if indexed:
				continue

			arch_map = libs.get(arch)
			if arch_map is None:
				arch_map = {}
				libs[arch] = arch_map
			if soname:
				soname_map = arch_map.get(soname)
				if soname_map is None:
					soname_map = self._soname_map_class(
						providers=[], consumers=[])
					arch_map[soname] = soname_map
				soname_map.providers.append(obj_key)
			for needed_soname in needed:
				soname_map = arch_map.get(needed_soname)
				if soname_map is None:
					soname_map = self._soname_map_class(
						providers=[], consumers=[])
					arch_map[needed_soname] = soname_map
				soname_map.consumers.append(obj_key)

		for arch, sonames in libs.items():
			for soname_node in sonames.values():
				soname_node.providers = tuple(set(soname_node.providers))
				soname_node.consumers = tuple(set(soname_node.consumers))
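
The scanelf format string "%a;%F;%S;%r;%n" used above emits one semicolon-separated record per scanned object: arch (e_machine), filename, soname, RPATH and NEEDED. Below is a minimal sketch of how such a record maps onto the fields that rebuild() consumes; SimpleNeededEntry and parse_needed_line are hypothetical illustrations, not portage API (the real parser is NeededEntry.parse):

# Hypothetical stand-in for portage's NeededEntry, for illustration only.
from typing import NamedTuple, Tuple

class SimpleNeededEntry(NamedTuple):
	arch: str
	filename: str
	soname: str
	runpaths: Tuple[str, ...]
	needed: Tuple[str, ...]

def parse_needed_line(line):
	fields = line.rstrip("\n").split(";")
	if len(fields) < 5:
		raise ValueError("wrong number of fields: %r" % line)
	arch, filename, soname, rpath, needed = fields[:5]
	return SimpleNeededEntry(
		arch=arch,
		filename=filename,
		soname=soname,
		# rpath is a colon-separated search list; needed is comma-separated.
		runpaths=tuple(x for x in rpath.split(":") if x),
		needed=tuple(x for x in needed.split(",") if x))

entry = parse_needed_line(
	"EM_X86_64;/usr/lib64/libfoo.so.1;libfoo.so.1;/usr/lib64:$ORIGIN;libc.so.6")
print(entry.soname, entry.needed)  # libfoo.so.1 ('libc.so.6',)
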
Code example #2
File: LinkageMapELF.py Project: aeroniero33/portage
	def rebuild(self, exclude_pkgs=None, include_file=None,
		preserve_paths=None):
		"""
		Raises CommandNotFound if there are preserved libs
		and the scanelf binary is not available.

		@param exclude_pkgs: A set of packages that should be excluded from
			the LinkageMap, since they are being unmerged and their NEEDED
			entries are therefore irrelevant and would only serve to corrupt
			the LinkageMap.
		@type exclude_pkgs: set
		@param include_file: The path of a file containing NEEDED entries for
			a package which does not exist in the vardbapi yet because it is
			currently being merged.
		@type include_file: String
		@param preserve_paths: Libraries preserved by a package instance that
			is currently being merged. They need to be explicitly passed to the
			LinkageMap, since they are not registered in the
			PreservedLibsRegistry yet.
		@type preserve_paths: set
		"""

		os = _os_merge
		root = self._root
		root_len = len(root) - 1
		self._clear_cache()
		self._defpath.update(getlibpaths(self._root, env=self._dbapi.settings))
		libs = self._libs
		obj_properties = self._obj_properties

		lines = []

		# Data from include_file is processed first so that it
		# overrides any data from previously installed files.
		if include_file is not None:
			for line in grabfile(include_file):
				lines.append((None, include_file, line))

		aux_keys = [self._needed_aux_key]
		can_lock = os.access(os.path.dirname(self._dbapi._dbroot), os.W_OK)
		if can_lock:
			self._dbapi.lock()
		try:
			for cpv in self._dbapi.cpv_all():
				if exclude_pkgs is not None and cpv in exclude_pkgs:
					continue
				needed_file = self._dbapi.getpath(cpv,
					filename=self._needed_aux_key)
				for line in self._dbapi.aux_get(cpv, aux_keys)[0].splitlines():
					lines.append((cpv, needed_file, line))
		finally:
			if can_lock:
				self._dbapi.unlock()

		# Have to call scanelf for preserved libs here, as they aren't
		# registered in NEEDED.ELF.2 files.
		plibs = {}
		if preserve_paths is not None:
			plibs.update((x, None) for x in preserve_paths)
		if self._dbapi._plib_registry and \
			self._dbapi._plib_registry.hasEntries():
			for cpv, items in \
				self._dbapi._plib_registry.getPreservedLibs().items():
				if exclude_pkgs is not None and cpv in exclude_pkgs:
					# These preserved libs will either be unmerged,
					# rendering them irrelevant, or they will be
					# preserved in the replacement package and are
					# already represented via the preserve_paths
					# parameter.
					continue
				plibs.update((x, cpv) for x in items)
		if plibs:
			args = [os.path.join(EPREFIX or "/", "usr/bin/scanelf"), "-qF", "%a;%F;%S;%r;%n"]
			args.extend(os.path.join(root, x.lstrip("." + os.sep)) \
				for x in plibs)
			try:
				proc = subprocess.Popen(args, stdout=subprocess.PIPE)
			except EnvironmentError as e:
				if e.errno != errno.ENOENT:
					raise
				raise CommandNotFound(args[0])
			else:
				for l in proc.stdout:
					try:
						l = _unicode_decode(l,
							encoding=_encodings['content'], errors='strict')
					except UnicodeDecodeError:
						l = _unicode_decode(l,
							encoding=_encodings['content'], errors='replace')
						writemsg_level(_("\nError decoding characters " \
							"returned from scanelf: %s\n\n") % (l,),
							level=logging.ERROR, noiselevel=-1)
					l = l[3:].rstrip("\n")
					if not l:
						continue
					fields = l.split(";")
					if len(fields) < 5:
						writemsg_level(_("\nWrong number of fields " \
							"returned from scanelf: %s\n\n") % (l,),
							level=logging.ERROR, noiselevel=-1)
						continue
					fields[1] = fields[1][root_len:]
					owner = plibs.pop(fields[1], None)
					lines.append((owner, "scanelf", ";".join(fields)))
				proc.wait()
				proc.stdout.close()

		if plibs:
			# Preserved libraries that did not appear in the scanelf output.
			# This is known to happen with statically linked libraries.
			# Generate dummy lines for these, so we can assume that every
			# preserved library has an entry in self._obj_properties. This
			# is important in order to prevent findConsumers from raising
			# an unwanted KeyError.
			for x, cpv in plibs.items():
				lines.append((cpv, "plibs", ";".join(['', x, '', '', ''])))

		# Share identical frozenset instances when available,
		# in order to conserve memory.
		frozensets = {}

		for owner, location, l in lines:
			l = l.rstrip("\n")
			if not l:
				continue
			if '\0' in l:
				# os.stat() will raise "TypeError: must be encoded string
				# without NULL bytes, not str" in this case.
				writemsg_level(_("\nLine contains null byte(s) " \
					"in %s: %s\n\n") % (location, l),
					level=logging.ERROR, noiselevel=-1)
				continue
			try:
				entry = NeededEntry.parse(location, l)
			except InvalidData as e:
				writemsg_level("\n%s\n\n" % (e,),
					level=logging.ERROR, noiselevel=-1)
				continue

			# If NEEDED.ELF.2 contains the new multilib category field,
			# then use that for categorization. Otherwise, if a mapping
			# exists, map e_machine (entry.arch) to an approximate
			# multilib category. If all else fails, use e_machine, just
			# as older versions of portage did.
			arch = entry.multilib_category
			if arch is None:
				arch = _approx_multilib_categories.get(
					entry.arch, entry.arch)

			obj = entry.filename
			soname = entry.soname
			expand = {"ORIGIN": os.path.dirname(entry.filename)}
			path = frozenset(normalize_path(
				varexpand(x, expand, error_leader=lambda: "%s: " % location))
				for x in entry.runpaths)
			path = frozensets.setdefault(path, path)
			needed = frozenset(entry.needed)

			needed = frozensets.setdefault(needed, needed)

			obj_key = self._obj_key(obj)
			indexed = True
			myprops = obj_properties.get(obj_key)
			if myprops is None:
				indexed = False
				myprops = self._obj_properties_class(
					arch, needed, path, soname, [], owner)
				obj_properties[obj_key] = myprops
			# All object paths are added into the obj_properties tuple.
			myprops.alt_paths.append(obj)

			# Don't index the same file more than once since only one
			# set of data can be correct and therefore mixing data
			# may corrupt the index (include_file overrides previously
			# installed).
			if indexed:
				continue

			arch_map = libs.get(arch)
			if arch_map is None:
				arch_map = {}
				libs[arch] = arch_map
			if soname:
				soname_map = arch_map.get(soname)
				if soname_map is None:
					soname_map = self._soname_map_class(
						providers=[], consumers=[])
					arch_map[soname] = soname_map
				soname_map.providers.append(obj_key)
			for needed_soname in needed:
				soname_map = arch_map.get(needed_soname)
				if soname_map is None:
					soname_map = self._soname_map_class(
						providers=[], consumers=[])
					arch_map[needed_soname] = soname_map
				soname_map.consumers.append(obj_key)

		for arch, sonames in libs.items():
			for soname_node in sonames.values():
				soname_node.providers = tuple(set(soname_node.providers))
				soname_node.consumers = tuple(set(soname_node.consumers))
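
Both newer versions intern identical frozenset instances via frozensets.setdefault(path, path): the first occurrence of a given set is stored, and each later equal set is swapped for that stored instance, so the many objects sharing one RPATH or NEEDED set reference a single frozenset. A minimal sketch of the idiom, with illustrative names:

# Minimal sketch of the frozenset-interning idiom used in rebuild().
interned = {}

def intern_frozenset(fs):
	# setdefault stores fs on first sight and afterwards returns the
	# previously stored instance for any equal set.
	return interned.setdefault(fs, fs)

a = intern_frozenset(frozenset(["/usr/lib64", "/lib64"]))
b = intern_frozenset(frozenset(["/lib64", "/usr/lib64"]))
assert a == b and a is b  # equal sets now share one instance
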
Code example #3
	def rebuild(self, exclude_pkgs=None, include_file=None):
		"""
		Raises CommandNotFound if there are preserved libs
		and the scanelf binary is not available.
		"""

		os = _os_merge
		root = self._eroot
		root_len = len(root) - 1
		self._clear_cache()
		self._defpath.update(getlibpaths(self._eroot))
		libs = self._libs
		obj_properties = self._obj_properties

		lines = []

		# Data from include_file is processed first so that it
		# overrides any data from previously installed files.
		if include_file is not None:
			lines += grabfile(include_file)

		aux_keys = [self._needed_aux_key]
		for cpv in self._dbapi.cpv_all():
			if exclude_pkgs is not None and cpv in exclude_pkgs:
				continue
			lines += self._dbapi.aux_get(cpv, aux_keys)[0].split('\n')
		# Cache NEEDED.* files to avoid doing excessive IO for every rebuild.
		self._dbapi.flush_cache()

		# Have to call scanelf for preserved libs here, as they aren't
		# registered in NEEDED.ELF.2 files.
		plibs = set()
		if self._dbapi._plib_registry and self._dbapi._plib_registry.getPreservedLibs():
			args = ["/usr/bin/scanelf", "-qF", "%a;%F;%S;%r;%n"]
			for items in self._dbapi._plib_registry.getPreservedLibs().values():
				plibs.update(items)
				args.extend(os.path.join(root, x.lstrip("." + os.sep)) \
					for x in items)
			try:
				proc = subprocess.Popen(args, stdout=subprocess.PIPE)
			except EnvironmentError as e:
				if e.errno != errno.ENOENT:
					raise
				raise CommandNotFound(args[0])
			else:
				for l in proc.stdout:
					try:
						l = _unicode_decode(l,
							encoding=_encodings['content'], errors='strict')
					except UnicodeDecodeError:
						l = _unicode_decode(l,
							encoding=_encodings['content'], errors='replace')
						writemsg_level(_("\nError decoding characters " \
							"returned from scanelf: %s\n\n") % (l,),
							level=logging.ERROR, noiselevel=-1)
					l = l[3:].rstrip("\n")
					if not l:
						continue
					fields = l.split(";")
					if len(fields) < 5:
						writemsg_level(_("\nWrong number of fields " \
							"returned from scanelf: %s\n\n") % (l,),
							level=logging.ERROR, noiselevel=-1)
						continue
					fields[1] = fields[1][root_len:]
					plibs.discard(fields[1])
					lines.append(";".join(fields))
				proc.wait()

		if plibs:
			# Preserved libraries that did not appear in the scanelf output.
			# This is known to happen with statically linked libraries.
			# Generate dummy lines for these, so we can assume that every
			# preserved library has an entry in self._obj_properties. This
			# is important in order to prevent findConsumers from raising
			# an unwanted KeyError.
			for x in plibs:
				lines.append(";".join(['', x, '', '', '']))

		for l in lines:
			l = l.rstrip("\n")
			if not l:
				continue
			fields = l.split(";")
			if len(fields) < 5:
				writemsg_level(_("\nWrong number of fields " \
					"in %s: %s\n\n") % (self._needed_aux_key, l),
					level=logging.ERROR, noiselevel=-1)
				continue
			arch = fields[0]
			obj = fields[1]
			soname = fields[2]
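			# Expand $ORIGIN/${ORIGIN} to the object's own directory, as
			# the dynamic linker does, before normalizing each RPATH entry.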
			path = set([normalize_path(x) \
				for x in filter(None, fields[3].replace(
				"${ORIGIN}", os.path.dirname(obj)).replace(
				"$ORIGIN", os.path.dirname(obj)).split(":"))])
			needed = [x for x in fields[4].split(",") if x]

			obj_key = self._obj_key(obj)
			indexed = True
			myprops = obj_properties.get(obj_key)
			if myprops is None:
				indexed = False
				myprops = (arch, needed, path, soname, set())
				obj_properties[obj_key] = myprops
			# All object paths are added into the obj_properties tuple.
			myprops[4].add(obj)

			# Don't index the same file more than once since only one
			# set of data can be correct and therefore mixing data
			# may corrupt the index (include_file overrides previously
			# installed).
			if indexed:
				continue

			arch_map = libs.get(arch)
			if arch_map is None:
				arch_map = {}
				libs[arch] = arch_map
			if soname:
				soname_map = arch_map.get(soname)
				if soname_map is None:
					soname_map = self._soname_map_class(
						providers=set(), consumers=set())
					arch_map[soname] = soname_map
				soname_map.providers.add(obj_key)
			for needed_soname in needed:
				soname_map = arch_map.get(needed_soname)
				if soname_map is None:
					soname_map = self._soname_map_class(
						providers=set(), consumers=set())
					arch_map[needed_soname] = soname_map
				soname_map.consumers.add(obj_key)
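
Every version ends by building the same two-sided index: per architecture, a map from soname to a node listing providers (objects whose own soname matches) and consumers (objects naming that soname in NEEDED). A minimal sketch using made-up data, where _SonameMap is a hypothetical stand-in for self._soname_map_class:

# Minimal sketch of the per-arch soname index that rebuild() constructs.
from collections import namedtuple

# Hypothetical stand-in for self._soname_map_class.
_SonameMap = namedtuple("_SonameMap", ("providers", "consumers"))
libs = {}  # arch -> {soname -> _SonameMap}

def record(arch, obj_key, soname, needed):
	arch_map = libs.setdefault(arch, {})
	if soname:
		arch_map.setdefault(soname,
			_SonameMap(providers=[], consumers=[])).providers.append(obj_key)
	for dep in needed:
		arch_map.setdefault(dep,
			_SonameMap(providers=[], consumers=[])).consumers.append(obj_key)

record("EM_X86_64", "/usr/lib64/libfoo.so.1", "libfoo.so.1", ["libc.so.6"])
record("EM_X86_64", "/usr/bin/foo", "", ["libfoo.so.1", "libc.so.6"])
print(libs["EM_X86_64"]["libfoo.so.1"].consumers)  # ['/usr/bin/foo']

The real code then de-duplicates these accumulator lists and freezes them into tuples, as the final loop in each version shows.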