Example #1
File: xpak.py Project: amadio/portage
	def unpackinfo(self, mydest):
		"""Unpacks all the files from the dataSegment into 'mydest'."""
		if not self.scan():
			return 0
		mydest = normalize_path(mydest) + os.sep
		a = open(_unicode_encode(self.file,
			encoding=_encodings['fs'], errors='strict'), 'rb')
		if not os.path.exists(mydest):
			os.makedirs(mydest)
		startpos = 0
		while ((startpos + 8) < self.indexsize):
			namelen = decodeint(self.index[startpos:startpos + 4])
			datapos = decodeint(self.index[startpos + 4 + namelen:startpos + 8 + namelen])
			datalen = decodeint(self.index[startpos + 8 + namelen:startpos + 12 + namelen])
			myname = self.index[startpos + 4:startpos + 4 + namelen]
			myname = _unicode_decode(myname,
				encoding=_encodings['repo.content'], errors='replace')
			filename = os.path.join(mydest, myname.lstrip(os.sep))
			filename = normalize_path(filename)
			if not filename.startswith(mydest):
				# myname contains invalid ../ component(s)
				continue
			dirname = os.path.dirname(filename)
			if dirname:
				if not os.path.exists(dirname):
					os.makedirs(dirname)
			mydat = open(_unicode_encode(filename,
				encoding=_encodings['fs'], errors='strict'), 'wb')
			a.seek(self.datapos + datapos)
			mydat.write(a.read(datalen))
			mydat.close()
			startpos = startpos + namelen + 12
		a.close()
		return 1
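A minimal usage sketch, not part of the original listing: unpackinfo() above is a method of portage.xpak.tbz2, which wraps a binary package file; the package path and destination below are illustrative.

from portage import xpak

pkg = xpak.tbz2("/var/cache/binpkgs/sys-apps/sed-4.8-1.tbz2")  # illustrative path
if pkg.unpackinfo("/tmp/sed-metadata"):  # returns 1 on success, 0 if scan() fails
	print("xpak metadata extracted")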
Example #2
def xpand(myid,mydest):
	myindex=myid[0]
	mydata=myid[1]
	try:
		origdir=os.getcwd()
	except SystemExit as e:
		raise
	except:
		os.chdir("/")
		origdir="/"
	os.chdir(mydest)
	myindexlen=len(myindex)
	startpos=0
	while ((startpos+8)<myindexlen):
		namelen=decodeint(myindex[startpos:startpos+4])
		datapos=decodeint(myindex[startpos+4+namelen:startpos+8+namelen])
		datalen=decodeint(myindex[startpos+8+namelen:startpos+12+namelen])
		myname=myindex[startpos+4:startpos+4+namelen]
		dirname=os.path.dirname(myname)
		if dirname:
			if not os.path.exists(dirname):
				os.makedirs(dirname)
		mydat = open(_unicode_encode(myname,
			encoding=_encodings['fs'], errors='strict'), 'wb')
		mydat.write(mydata[datapos:datapos+datalen])
		mydat.close()
		startpos=startpos+namelen+12
	os.chdir(origdir)
Example #3
def _file_archive_ensure_dir(parent_dir):
	"""
	Ensure that the parent directory for an archive exists.
	If a file exists where a directory is needed, then rename
	it (see bug 256376).

	@param parent_dir: path of parent directory
	@type parent_dir: str
	"""

	for parent in iter_parents(parent_dir):
		# Use lstat because a symlink to a directory might point
		# to a directory outside of the config archive, making
		# it an unsuitable parent.
		try:
			parent_st = os.lstat(parent)
		except OSError:
			pass
		else:
			if not stat.S_ISDIR(parent_st.st_mode):
				_file_archive_rotate(parent)
			break

	try:
		os.makedirs(parent_dir)
	except OSError:
		pass
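A hedged sketch of how dispatch-conf style code might call the helper above before copying a file into the archive (the path is illustrative):

import os

archive = '/etc/config-archive/etc/portage/make.conf'
_file_archive_ensure_dir(os.path.dirname(archive))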
Example #4
File: xpak.py Project: amadio/portage
def xpand(myid, mydest):
	mydest = normalize_path(mydest) + os.sep
	myindex = myid[0]
	mydata = myid[1]
	myindexlen = len(myindex)
	startpos = 0
	while ((startpos + 8) < myindexlen):
		namelen = decodeint(myindex[startpos:startpos + 4])
		datapos = decodeint(myindex[startpos + 4 + namelen:startpos + 8 + namelen])
		datalen = decodeint(myindex[startpos + 8 + namelen:startpos + 12 + namelen])
		myname = myindex[startpos + 4:startpos + 4 + namelen]
		myname = _unicode_decode(myname,
			encoding=_encodings['repo.content'], errors='replace')
		filename = os.path.join(mydest, myname.lstrip(os.sep))
		filename = normalize_path(filename)
		if not filename.startswith(mydest):
			# myname contains invalid ../ component(s)
			continue
		dirname = os.path.dirname(filename)
		if dirname:
			if not os.path.exists(dirname):
				os.makedirs(dirname)
		mydat = open(_unicode_encode(filename,
			encoding=_encodings['fs'], errors='strict'), 'wb')
		mydat.write(mydata[datapos:datapos + datalen])
		mydat.close()
		startpos = startpos + namelen + 12
Example #5
def dodir(path):
	try:
		os.makedirs(path, 0o755)
	except OSError:
		if not os.path.isdir(path):
			raise
		os.chmod(path, 0o755)
Example #6
def ensure_dirs(dir_path, *args, **kwargs):
	"""Create a directory and call apply_permissions.
	Returns True if a directory is created or the permissions needed to be
	modified, and False otherwise."""

	created_dir = False

	try:
		os.makedirs(dir_path)
		created_dir = True
	except OSError as oe:
		func_call = "makedirs('%s')" % dir_path
		if oe.errno in (errno.EEXIST, errno.EISDIR):
			pass
		else:
			if os.path.isdir(dir_path):
				# NOTE: DragonFly raises EPERM for makedir('/')
				# and that is supposed to be ignored here.
				pass
			elif oe.errno == errno.EPERM:
				raise OperationNotPermitted(func_call)
			elif oe.errno == errno.EACCES:
				raise PermissionDenied(func_call)
			elif oe.errno == errno.EROFS:
				raise ReadOnlyFileSystem(func_call)
			else:
				raise
	perms_modified = apply_permissions(dir_path, *args, **kwargs)
	return created_dir or perms_modified
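A short usage sketch of the public helper (portage.util.ensure_dirs); the path and mode below are illustrative:

from portage.util import ensure_dirs

changed = ensure_dirs('/var/tmp/portage-example', mode=0o755)
print('created or re-permissioned' if changed else 'already in the desired state')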
Example #7
	def __init__(self, ebuilds={}, installed={}, profile={}, repo_configs={}, \
		user_config={}, sets={}, world=[], world_sets=[], distfiles={}, debug=False):
		"""
		ebuilds: cpv -> metadata mapping simulating available ebuilds. 
		installed: cpv -> metadata mapping simulating installed packages.
			If a metadata key is missing, it gets a default value.
		profile: settings defined by the profile.
		"""
		self.debug = debug
		self.eprefix = normalize_path(tempfile.mkdtemp())
		self.eroot = self.eprefix + os.sep
		self.distdir = os.path.join(self.eroot, "var", "portage", "distfiles")
		self.portdir = os.path.join(self.eroot, "usr/portage")
		self.vdbdir = os.path.join(self.eroot, "var/db/pkg")
		os.makedirs(self.portdir)
		os.makedirs(self.vdbdir)

		if not debug:
			portage.util.noiselimit = -2

		self.repo_dirs = {}
		#Make sure the main repo is always created
		self._get_repo_dir("test_repo")

		self._create_distfiles(distfiles)
		self._create_ebuilds(ebuilds)
		self._create_installed(installed)
		self._create_profile(ebuilds, installed, profile, repo_configs, user_config, sets)
		self._create_world(world, world_sets)

		self.settings, self.trees = self._load_config()

		self._create_ebuild_manifests(ebuilds)
		
		portage.util.noiselimit = 0
Example #8
	def _create_installed(self, installed):
		for cpv in installed:
			a = Atom("=" + cpv, allow_repo=True)
			repo = a.repo
			if repo is None:
				repo = "test_repo"

			vdb_pkg_dir = os.path.join(self.vdbdir, a.cpv)
			try:
				os.makedirs(vdb_pkg_dir)
			except os.error:
				pass

			metadata = installed[cpv].copy()
			metadata.setdefault("SLOT", "0")
			metadata.setdefault("BUILD_TIME", "0")
			metadata.setdefault("COUNTER", "0")
			metadata.setdefault("KEYWORDS", "~x86")

			unknown_keys = set(metadata).difference(
				portage.dbapi.dbapi._known_keys)
			unknown_keys.discard("BUILD_TIME")
			unknown_keys.discard("COUNTER")
			unknown_keys.discard("repository")
			unknown_keys.discard("USE")
			if unknown_keys:
				raise ValueError("metadata of installed '%s' contains unknown keys: %s" %
					(cpv, sorted(unknown_keys)))

			metadata["repository"] = repo
			for k, v in metadata.items():
				with open(os.path.join(vdb_pkg_dir, k), "w") as f:
					f.write("%s\n" % v)
Example #9
File: git.py Project: jonasstein/portage
	def new(self, **kwargs):
		'''Do the initial clone of the repository'''
		if kwargs:
			self._kwargs(kwargs)
		emerge_config = self.options.get('emerge_config', None)
		portdb = self.options.get('portdb', None)
		try:
			if not os.path.exists(self.repo.location):
				os.makedirs(self.repo.location)
				self.logger(self.xterm_titles,
					'Created new directory %s' % self.repo.location)
		except IOError:
			return (1, False)
		msg = ">>> Cloning git repository from upstream into %s..." % self.repo.location
		self.logger(self.xterm_titles, msg)
		writemsg_level(msg + "\n")
		sync_uri = self.repo.sync_uri
		if sync_uri.startswith("file://"):
			sync_uri = sync_uri[6:]
		exitcode = portage.process.spawn_bash("cd %s ; %s clone %s ." % \
			(portage._shell_quote(self.repo.location),
			self.bin_command,
			portage._shell_quote(sync_uri)),
			**portage._native_kwargs(self.spawn_kwargs))
		if exitcode != os.EX_OK:
			msg = "!!! git clone error in %s" % self.repo.location
			self.logger(self.xterm_titles, msg)
			writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
			return (exitcode, False)
		msg = ">>> Git clone successful"
		self.logger(self.xterm_titles, msg)
		writemsg_level(msg + "\n")
		return (os.EX_OK, True)
Example #10
File: git.py Project: gmt/portage
	def new(self, **kwargs):
		'''Do the initial clone of the repository'''
		if kwargs:
			self._kwargs(kwargs)
		try:
			if not os.path.exists(self.repo.location):
				os.makedirs(self.repo.location)
				self.logger(self.xterm_titles,
					'Created new directory %s' % self.repo.location)
		except IOError:
			return (1, False)

		sync_uri = self.repo.sync_uri
		if sync_uri.startswith("file://"):
			sync_uri = sync_uri[6:]

		git_cmd_opts = ""
		if self.settings.get("PORTAGE_QUIET") == "1":
			git_cmd_opts += " --quiet"
		if self.repo.sync_depth is not None:
			git_cmd_opts += " --depth %d" % self.repo.sync_depth
		git_cmd = "%s clone%s %s ." % (self.bin_command, git_cmd_opts,
			portage._shell_quote(sync_uri))
		writemsg_level(git_cmd + "\n")

		exitcode = portage.process.spawn_bash("cd %s ; exec %s" % (
				portage._shell_quote(self.repo.location), git_cmd),
			**portage._native_kwargs(self.spawn_kwargs))
		if exitcode != os.EX_OK:
			msg = "!!! git clone error in %s" % self.repo.location
			self.logger(self.xterm_titles, msg)
			writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
			return (exitcode, False)
		return (os.EX_OK, True)
Example #11
	def setUp(self):
		super(RepomanEchangelogTestCase, self).setUp()

		self.tmpdir = tempfile.mkdtemp(prefix='repoman.echangelog.')

		self.skel_changelog = os.path.join(self.tmpdir, 'skel.ChangeLog')
		skel = [
			'# ChangeLog for <CATEGORY>/<PACKAGE_NAME>\n',
			'# Copyright 1999-2000 Gentoo Foundation; Distributed under the GPL v2\n',
			'# $Header: $\n'
		]
		self._writelines(self.skel_changelog, skel)

		self.cat = 'mycat'
		self.pkg = 'mypkg'
		self.pkgdir = os.path.join(self.tmpdir, self.cat, self.pkg)
		os.makedirs(self.pkgdir)

		self.header_pkg = '# ChangeLog for %s/%s\n' % (self.cat, self.pkg)
		self.header_copyright = '# Copyright 1999-%s Gentoo Foundation; Distributed under the GPL v2\n' % \
			time.strftime('%Y', time.gmtime())
		self.header_cvs = '# $Header: $\n'

		self.changelog = os.path.join(self.pkgdir, 'ChangeLog')

		self.user = '******'
Example #12
	def testSetCpv(self):
		"""
		Test the clone via constructor.
		"""

		ebuilds = {
			"dev-libs/A-1": {"IUSE": "static-libs"},
			"dev-libs/B-1": {"IUSE": "static-libs"},
		}

		env_files = {
			"A" : ("USE=\"static-libs\"",)
		}

		package_env = (
			"dev-libs/A A",
		)

		eprefix = normalize_path(tempfile.mkdtemp())
		playground = None
		try:
			user_config_dir = os.path.join(eprefix, USER_CONFIG_PATH)
			os.makedirs(user_config_dir)

			with io.open(os.path.join(user_config_dir, "package.env"),
				mode='w', encoding=_encodings['content']) as f:
				for line in package_env:
					f.write(line + "\n")

			env_dir = os.path.join(user_config_dir, "env")
			os.makedirs(env_dir)
			for k, v in env_files.items():
				with io.open(os.path.join(env_dir, k), mode='w',
					encoding=_encodings['content']) as f:
					for line in v:
						f.write(line + "\n")

			playground = ResolverPlayground(eprefix=eprefix, ebuilds=ebuilds)
			settings = config(clone=playground.settings)

			result = playground.run(["=dev-libs/A-1"])
			pkg, existing_node = result.depgraph._select_package(
				playground.eroot, Atom("=dev-libs/A-1"))
			settings.setcpv(pkg)
			self.assertTrue("static-libs" in
				settings["PORTAGE_USE"].split())

			# Test bug #522362, where a USE=static-libs package.env
			# setting leaked from one setcpv call to the next.
			pkg, existing_node = result.depgraph._select_package(
				playground.eroot, Atom("=dev-libs/B-1"))
			settings.setcpv(pkg)
			self.assertTrue("static-libs" not in
				settings["PORTAGE_USE"].split())

		finally:
			if playground is None:
				shutil.rmtree(eprefix)
			else:
				playground.cleanup()
Example #13
	def __init__(self, ebuilds={}, installed={}, profile={}, user_config={}, sets={}, world=[], debug=False):
		"""
		ebuilds: cpv -> metadata mapping simulating available ebuilds.
		installed: cpv -> metadata mapping simulating installed packages.
			If a metadata key is missing, it gets a default value.
		profile: settings defined by the profile.
		"""
		self.debug = debug
		self.root = "/"
		self.eprefix = tempfile.mkdtemp()
		self.eroot = self.root + self.eprefix.lstrip(os.sep) + os.sep
		self.portdir = os.path.join(self.eroot, "usr/portage")
		self.vdbdir = os.path.join(self.eroot, "var/db/pkg")
		os.makedirs(self.portdir)
		os.makedirs(self.vdbdir)

		if not debug:
			portage.util.noiselimit = -2

		self._create_ebuilds(ebuilds)
		self._create_installed(installed)
		self._create_profile(ebuilds, installed, profile, user_config, sets)
		self._create_world(world)

		self.settings, self.trees = self._load_config()

		self._create_ebuild_manifests(ebuilds)
		
		portage.util.noiselimit = 0
Example #14
def save_cache(logger, to_save={}, temp_path=DEFAULTS['DEFAULT_TMP_DIR']):
	''' Tries to store caching information.
		@param logger
		@param to_save have to be dict with keys:
			libraries, la_libraries, libraries_links and binaries
	'''

	if not os.path.exists(temp_path):
		os.makedirs(temp_path)

	try:
		_file = open(_unicode_encode(os.path.join(temp_path, 'timestamp'),
			encoding=_encodings['fs']), mode='w', encoding=_encodings['content'])
		_file.write(_unicode(int(time.time())))
		_file.close()

		for key,val in to_save.items():
			_file = open(_unicode_encode(os.path.join(temp_path, key),
				encoding=_encodings['fs']), mode='w',
				encoding=_encodings['content'])
			for line in val:
				_file.write(line + '\n')
			_file.close()
	except Exception as ex:
		logger.warning('\t' + red('Could not save cache: %s' %str(ex)))
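A hedged call sketch for save_cache() (the logger and temp_path are illustrative); to_save carries the four keys named in the docstring:

import logging

save_cache(logging.getLogger('revdep-rebuild'),
	to_save={'libraries': [], 'la_libraries': [],
		'libraries_links': [], 'binaries': []},
	temp_path='/var/cache/revdep-rebuild')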
Example #15
	def testDoebuildSpawn(self):
		playground = ResolverPlayground()
		try:
			settings = config(clone=playground.settings)
			cpv = 'sys-apps/portage-2.1'
			metadata = {
				'EAPI'      : '2',
				'INHERITED' : 'python eutils',
				'IUSE'      : 'build doc epydoc python3 selinux',
				'LICENSE'   : 'GPL-2',
				'PROVIDE'   : 'virtual/portage',
				'RDEPEND'   : '>=app-shells/bash-3.2_p17 >=dev-lang/python-2.6',
				'SLOT'      : '0',
			}
			root_config = playground.trees[playground.eroot]['root_config']
			pkg = Package(built=False, cpv=cpv, installed=False,
				metadata=metadata, root_config=root_config,
				type_name='ebuild')
			settings.setcpv(pkg)
			settings['PORTAGE_PYTHON'] = _python_interpreter
			settings['PORTAGE_BUILDDIR'] = os.path.join(
				settings['PORTAGE_TMPDIR'], cpv)
			settings['T'] = os.path.join(
				settings['PORTAGE_BUILDDIR'], 'temp')
			for x in ('PORTAGE_BUILDDIR', 'T'):
				os.makedirs(settings[x])
			# Create a fake environment, to pretend as if the ebuild
			# has been sourced already.
			open(os.path.join(settings['T'], 'environment'), 'wb').close()

			scheduler = PollScheduler().sched_iface
			for phase in ('_internal_test',):

				# Test EbuildSpawnProcess by calling doebuild.spawn() with
				# returnpid=False. This case is no longer used by portage
				# internals since EbuildPhase is used instead and that passes
				# returnpid=True to doebuild.spawn().
				rval = doebuild_spawn("%s %s" % (_shell_quote(
					os.path.join(settings["PORTAGE_BIN_PATH"],
					os.path.basename(EBUILD_SH_BINARY))), phase),
					settings, free=1)
				self.assertEqual(rval, os.EX_OK)

				ebuild_phase = EbuildPhase(background=False,
					phase=phase, scheduler=scheduler,
					settings=settings)
				ebuild_phase.start()
				ebuild_phase.wait()
				self.assertEqual(ebuild_phase.returncode, os.EX_OK)

			ebuild_phase = MiscFunctionsProcess(background=False,
				commands=['success_hooks'],
				scheduler=scheduler, settings=settings)
			ebuild_phase.start()
			ebuild_phase.wait()
			self.assertEqual(ebuild_phase.returncode, os.EX_OK)
		finally:
			playground.cleanup()
Example #16
File: git.py Project: mgorny/portage
	def new(self, **kwargs):
		'''Do the initial clone of the repository'''
		if kwargs:
			self._kwargs(kwargs)
		if not self.has_bin:
			return (1, False)
		try:
			if not os.path.exists(self.repo.location):
				os.makedirs(self.repo.location)
				self.logger(self.xterm_titles,
					'Created new directory %s' % self.repo.location)
		except IOError:
			return (1, False)

		sync_uri = self.repo.sync_uri
		if sync_uri.startswith("file://"):
			sync_uri = sync_uri[7:]

		git_cmd_opts = ""
		if self.repo.module_specific_options.get('sync-git-env'):
			shlexed_env = shlex_split(self.repo.module_specific_options['sync-git-env'])
			env = dict((k, v) for k, _, v in (assignment.partition('=') for assignment in shlexed_env) if k)
			self.spawn_kwargs['env'].update(env)

		if self.repo.module_specific_options.get('sync-git-clone-env'):
			shlexed_env = shlex_split(self.repo.module_specific_options['sync-git-clone-env'])
			clone_env = dict((k, v) for k, _, v in (assignment.partition('=') for assignment in shlexed_env) if k)
			self.spawn_kwargs['env'].update(clone_env)

		if self.settings.get("PORTAGE_QUIET") == "1":
			git_cmd_opts += " --quiet"
		if self.repo.clone_depth is not None:
			if self.repo.clone_depth != 0:
				git_cmd_opts += " --depth %d" % self.repo.clone_depth
		elif self.repo.sync_depth is not None:
			if self.repo.sync_depth != 0:
				git_cmd_opts += " --depth %d" % self.repo.sync_depth
		else:
			# default
			git_cmd_opts += " --depth 1"

		if self.repo.module_specific_options.get('sync-git-clone-extra-opts'):
			git_cmd_opts += " %s" % self.repo.module_specific_options['sync-git-clone-extra-opts']
		git_cmd = "%s clone%s %s ." % (self.bin_command, git_cmd_opts,
			portage._shell_quote(sync_uri))
		writemsg_level(git_cmd + "\n")

		exitcode = portage.process.spawn_bash("cd %s ; exec %s" % (
				portage._shell_quote(self.repo.location), git_cmd),
			**self.spawn_kwargs)
		if exitcode != os.EX_OK:
			msg = "!!! git clone error in %s" % self.repo.location
			self.logger(self.xterm_titles, msg)
			writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
			return (exitcode, False)
		if not self.verify_head():
			return (1, False)
		return (os.EX_OK, True)
Example #17
	def testFakedbapi(self):
		packages = (
			("sys-apps/portage-2.1.10", {
				"EAPI"         : "2",
				"IUSE"         : "ipc doc",
				"repository"   : "gentoo",
				"SLOT"         : "0",
				"USE"          : "ipc missing-iuse",
			}),
			("virtual/package-manager-0", {
				"EAPI"         : "0",
				"repository"   : "gentoo",
				"SLOT"         : "0",
			}),
		)

		match_tests = (
			("sys-apps/portage:0[ipc]",             ["sys-apps/portage-2.1.10"]),
			("sys-apps/portage:0[-ipc]",            []),
			("sys-apps/portage:0[doc]",             []),
			("sys-apps/portage:0[-doc]",            ["sys-apps/portage-2.1.10"]),
			("sys-apps/portage:0",                  ["sys-apps/portage-2.1.10"]),
			("sys-apps/portage:0[missing-iuse]",    []),
			("sys-apps/portage:0[-missing-iuse]",   []),
			("sys-apps/portage:0::gentoo[ipc]",     ["sys-apps/portage-2.1.10"]),
			("sys-apps/portage:0::multilib[ipc]",   []),
			("virtual/package-manager",             ["virtual/package-manager-0"]),
		)

		tempdir = tempfile.mkdtemp()
		try:
			test_repo = os.path.join(tempdir, "var", "repositories", "test_repo")
			os.makedirs(os.path.join(test_repo, "profiles"))
			with open(os.path.join(test_repo, "profiles", "repo_name"), "w") as f:
				f.write("test_repo")
			env = {
				"PORTAGE_REPOSITORIES": "[DEFAULT]\nmain-repo = test_repo\n[test_repo]\nlocation = %s" % test_repo
			}

			# Tests may override portage.const.EPREFIX in order to
			# simulate a prefix installation. It's reasonable to do
			# this because tests should be self-contained such that
			# the "real" value of portage.const.EPREFIX is entirely
			# irrelevant (see bug #492932).
			portage.const.EPREFIX = tempdir

			fakedb = fakedbapi(settings=config(config_profile_path="",
				env=env, eprefix=tempdir))
			for cpv, metadata in packages:
				fakedb.cpv_inject(cpv, metadata=metadata)

			for atom, expected_result in match_tests:
				result = fakedb.match(atom)
				self.assertEqual(fakedb.match(atom), expected_result,
					"fakedb.match('%s') = %s != %s" %
					(atom, result, expected_result))
		finally:
			shutil.rmtree(tempdir)
Example #18
	def _create_installed(self, installed):
		for cpv in installed:
			a = Atom("=" + cpv, allow_repo=True)
			repo = a.repo
			if repo is None:
				repo = "test_repo"

			vdb_pkg_dir = os.path.join(self.vdbdir, a.cpv)
			try:
				os.makedirs(vdb_pkg_dir)
			except os.error:
				pass

			metadata = installed[cpv].copy()
			eapi = metadata.pop("EAPI", 0)
			lic = metadata.pop("LICENSE", "")
			properties = metadata.pop("PROPERTIES", "")
			slot = metadata.pop("SLOT", 0)
			build_time = metadata.pop("BUILD_TIME", "0")
			keywords = metadata.pop("KEYWORDS", "~x86")
			iuse = metadata.pop("IUSE", "")
			use = metadata.pop("USE", "")
			provide = metadata.pop("PROVIDE", None)
			depend = metadata.pop("DEPEND", "")
			rdepend = metadata.pop("RDEPEND", None)
			pdepend = metadata.pop("PDEPEND", None)
			required_use = metadata.pop("REQUIRED_USE", None)

			if metadata:
				raise ValueError("metadata of installed '%s' contains unknown keys: %s" % (cpv, metadata.keys()))

			def write_key(key, value):
				f = open(os.path.join(vdb_pkg_dir, key), "w")
				f.write(str(value) + "\n")
				f.close()
			
			write_key("EAPI", eapi)
			write_key("BUILD_TIME", build_time)
			write_key("COUNTER", "0")
			write_key("LICENSE", lic)
			write_key("PROPERTIES", properties)
			write_key("SLOT", slot)
			write_key("LICENSE", lic)
			write_key("PROPERTIES", properties)
			write_key("repository", repo)
			write_key("KEYWORDS", keywords)
			write_key("IUSE", iuse)
			write_key("USE", use)
			if provide is not None:
				write_key("PROVIDE", provide)
			write_key("DEPEND", depend)
			if rdepend is not None:
				write_key("RDEPEND", rdepend)
			if pdepend is not None:
				write_key("PDEPEND", pdepend)
			if required_use is not None:
				write_key("REQUIRED_USE", required_use)
Example #19
File: rsync.py Project: helb/portage
 def new(self, **kwargs):
     if kwargs:
         self._kwargs(kwargs)
     try:
         if not os.path.exists(self.repo.location):
             os.makedirs(self.repo.location)
             self.logger(self.xterm_titles, "Created New Directory %s " % self.repo.location)
     except IOError:
         return (1, False)
     return self.update()
Example #20
	def _create_world(self, world):
		#Create /var/lib/portage/world
		var_lib_portage = os.path.join(self.eroot, "var", "lib", "portage")
		os.makedirs(var_lib_portage)

		world_file = os.path.join(var_lib_portage, "world")

		f = open(world_file, "w")
		for atom in world:
			f.write("%s\n" % atom)
		f.close()
Example #21
def rcs_archive(archive, curconf, newconf, mrgconf):
	"""Archive existing config in rcs (on trunk). Then, if mrgconf is
	specified and an old branch version exists, merge the user's changes
	and the distributed changes and put the result into mrgconf.  Lastly,
	if newconf was specified, leave it in the archive dir with a .dist.new
	suffix along with the last 1.1.1 branch version with a .dist suffix."""

	try:
		os.makedirs(os.path.dirname(archive))
	except OSError:
		pass

	try:
		curconf_st = os.lstat(curconf)
	except OSError:
		curconf_st = None

	if curconf_st is not None and \
		(stat.S_ISREG(curconf_st.st_mode) or
		stat.S_ISLNK(curconf_st.st_mode)):
		_archive_copy(curconf_st, curconf, archive)

	if os.path.lexists(archive + ',v'):
		os.system(RCS_LOCK + ' ' + archive)
	os.system(RCS_PUT + ' ' + archive)

	ret = 0
	mystat = None
	if newconf:
		try:
			mystat = os.lstat(newconf)
		except OSError:
			pass

	if mystat is not None and \
		(stat.S_ISREG(mystat.st_mode) or
		stat.S_ISLNK(mystat.st_mode)):
		os.system(RCS_GET + ' -r' + RCS_BRANCH + ' ' + archive)
		has_branch = os.path.lexists(archive)
		if has_branch:
			os.rename(archive, archive + '.dist')

		_archive_copy(mystat, newconf, archive)

		if has_branch:
			if mrgconf and os.path.isfile(archive) and \
				os.path.isfile(mrgconf):
				# This puts the results of the merge into mrgconf.
				ret = os.system(RCS_MERGE % (archive, mrgconf))
				os.chmod(mrgconf, mystat.st_mode)
				os.chown(mrgconf, mystat.st_uid, mystat.st_gid)
		os.rename(archive, archive + '.dist.new')

	return ret
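A hedged call sketch (all paths are illustrative): archive the live config file, then merge the distributed changes into mrgconf as described in the docstring above.

ret = rcs_archive('/etc/config-archive/etc/make.conf', '/etc/make.conf',
	'/etc/._cfg0000_make.conf', '/tmp/merged.make.conf')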
Example #22
File: xpak.py Project: amadio/portage
	def decompose(self, datadir, cleanup=1):
		"""Alias for unpackinfo() --- Complement to recompose() but optionally
		deletes the destination directory. Extracts the xpak from the tbz2 into
		the directory provided. Raises IOError if scan() fails.
		Returns result of unpackinfo()."""
		if not self.scan():
			raise IOError
		if cleanup:
			self.cleanup(datadir)
		if not os.path.exists(datadir):
			os.makedirs(datadir)
		return self.unpackinfo(datadir)
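A brief sketch, assuming the same portage.xpak.tbz2 wrapper as in Example #1 (the package path is illustrative): decompose() recreates datadir and then delegates to unpackinfo().

from portage import xpak

pkg = xpak.tbz2("/var/cache/binpkgs/app-arch/xz-utils-5.2.5.tbz2")
pkg.decompose("/tmp/xz-metadata", cleanup=1)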
Example #23
	def __init__(self, ebuilds={}, binpkgs={}, installed={}, profile={}, repo_configs={}, \
		user_config={}, sets={}, world=[], world_sets=[], distfiles={},
		eprefix=None, targetroot=False, debug=False):
		"""
		ebuilds: cpv -> metadata mapping simulating available ebuilds.
		installed: cpv -> metadata mapping simulating installed packages.
			If a metadata key is missing, it gets a default value.
		profile: settings defined by the profile.
		"""

		self.debug = debug
		if eprefix is None:
			self.eprefix = normalize_path(tempfile.mkdtemp())
		else:
			self.eprefix = normalize_path(eprefix)

		# Tests may override portage.const.EPREFIX in order to
		# simulate a prefix installation. It's reasonable to do
		# this because tests should be self-contained such that
		# the "real" value of portage.const.EPREFIX is entirely
		# irrelevant (see bug #492932).
		portage.const.EPREFIX = self.eprefix.rstrip(os.sep)

		self.eroot = self.eprefix + os.sep
		if targetroot:
			self.target_root = os.path.join(self.eroot, 'target_root')
		else:
			self.target_root = os.sep
		self.distdir = os.path.join(self.eroot, "var", "portage", "distfiles")
		self.pkgdir = os.path.join(self.eprefix, "pkgdir")
		self.vdbdir = os.path.join(self.eroot, "var/db/pkg")
		os.makedirs(self.vdbdir)

		if not debug:
			portage.util.noiselimit = -2

		self._repositories = {}
		#Make sure the main repo is always created
		self._get_repo_dir("test_repo")

		self._create_distfiles(distfiles)
		self._create_ebuilds(ebuilds)
		self._create_binpkgs(binpkgs)
		self._create_installed(installed)
		self._create_profile(ebuilds, installed, profile, repo_configs, user_config, sets)
		self._create_world(world, world_sets)

		self.settings, self.trees = self._load_config()

		self._create_ebuild_manifests(ebuilds)

		portage.util.noiselimit = 0
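A hedged usage sketch based on this constructor: build a throwaway tree, run a resolver test, and always clean up (run() is used the same way in Example #12; the cpv and metadata are illustrative).

playground = ResolverPlayground(
	ebuilds={"dev-libs/A-1": {"EAPI": "5", "IUSE": "+foo"}},
	installed={"dev-libs/A-1": {"EAPI": "5", "IUSE": "+foo", "USE": "foo"}},
	world=["dev-libs/A"])
try:
	result = playground.run(["@world"])
finally:
	playground.cleanup()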
Example #24
 def makedirs(dir_path):
     try:
         os.makedirs(dir_path)
     except OSError as oe:
         if errno.EEXIST == oe.errno:
             pass
         elif errno.EPERM == oe.errno:
             writemsg("%s\n" % oe, noiselevel=-1)
             writemsg(_("Operation Not Permitted: makedirs('%s')\n") % dir_path, noiselevel=-1)
             return False
         else:
             raise
     return True
Example #25
    def test_gpkg_get_metadata_url(self):
        if sys.version_info.major < 3:
            self.skipTest("Not support Python 2")

        if sys.version_info.major == 3 and sys.version_info.minor <= 6:
            self.skipTest("http server not support change root dir")

        playground = ResolverPlayground(
            user_config={
                "make.conf": (
                    'BINPKG_COMPRESS="gzip"',
                    'FEATURES="${FEATURES} -binpkg-signing '
                    '-binpkg-request-signature"',
                ),
            })
        tmpdir = tempfile.mkdtemp()
        try:
            settings = playground.settings
            for _ in range(0, 5):
                port = random.randint(30000, 60000)
                try:
                    server = self.start_http_server(tmpdir, port)
                except OSError:
                    continue
                break

            orig_full_path = os.path.join(tmpdir, "orig/")
            os.makedirs(orig_full_path)

            with open(os.path.join(orig_full_path, "test"), "wb") as test_file:
                test_file.write(urandom(1048576))

            gpkg_file_loc = os.path.join(tmpdir, "test.gpkg.tar")
            test_gpkg = gpkg(settings, "test", gpkg_file_loc)

            meta = {
                "test1": b"{abcdefghijklmnopqrstuvwxyz, 1234567890}",
                "test2": urandom(102400),
            }

            test_gpkg.compress(os.path.join(tmpdir, "orig"), meta)

            meta_from_url = test_gpkg.get_metadata_url("http://127.0.0.1:" +
                                                       str(port) +
                                                       "/test.gpkg.tar")

            self.assertEqual(meta, meta_from_url)
        finally:
            shutil.rmtree(tmpdir)
            playground.cleanup()
Example #26
	def makedirs(dir_path):
		try:
			os.makedirs(dir_path)
		except OSError as oe:
			if errno.EEXIST == oe.errno:
				pass
			elif errno.EPERM == oe.errno:
				writemsg("%s\n" % oe, noiselevel=-1)
				writemsg(_("Operation Not Permitted: makedirs('%s')\n") % \
					dir_path, noiselevel=-1)
				return False
			else:
				raise
		return True
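A minimal sketch of a hypothetical caller that uses the helper's boolean result (writemsg as already imported in this module; the path is illustrative):

if not makedirs("/var/lib/portage"):
	writemsg("!!! unable to create /var/lib/portage, skipping\n", noiselevel=-1)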
Example #27
    def _create_installed(self, installed):
        for cpv in installed:
            a = Atom("=" + cpv, allow_repo=True)
            repo = a.repo
            if repo is None:
                repo = "test_repo"

            vdb_pkg_dir = os.path.join(self.vdbdir, a.cpv)
            try:
                os.makedirs(vdb_pkg_dir)
            except os.error:
                pass

            metadata = installed[cpv].copy()
            metadata.setdefault("SLOT", "0")
            metadata.setdefault("BUILD_TIME", "0")
            metadata.setdefault("COUNTER", "0")
            metadata.setdefault("KEYWORDS", "~x86")

            unknown_keys = set(metadata).difference(
                portage.dbapi.dbapi._known_keys)
            unknown_keys.discard("BUILD_TIME")
            unknown_keys.discard("BUILD_ID")
            unknown_keys.discard("COUNTER")
            unknown_keys.discard("repository")
            unknown_keys.discard("USE")
            unknown_keys.discard("PROVIDES")
            unknown_keys.discard("REQUIRES")
            if unknown_keys:
                raise ValueError(
                    "metadata of installed '%s' contains unknown keys: %s" %
                    (cpv, sorted(unknown_keys)))

            metadata["repository"] = repo
            for k, v in metadata.items():
                with open(os.path.join(vdb_pkg_dir, k), "w") as f:
                    f.write("%s\n" % v)

            ebuild_path = os.path.join(vdb_pkg_dir,
                                       a.cpv.split("/")[1] + ".ebuild")
            with open(ebuild_path, "w") as f:
                f.write('EAPI="%s"\n' % metadata.pop("EAPI", "0"))
                for k, v in metadata.items():
                    f.write('%s="%s"\n' % (k, v))

            env_path = os.path.join(vdb_pkg_dir, "environment.bz2")
            with bz2.BZ2File(env_path, mode="w") as f:
                with open(ebuild_path, "rb") as inputfile:
                    f.write(inputfile.read())
Example #28
    def test_gpkg_incorrect_checksum(self):
        if sys.version_info.major < 3:
            self.skipTest("Not support Python 2")

        playground = ResolverPlayground(
            user_config={
                "make.conf": (
                    'FEATURES="${FEATURES} -binpkg-signing '
                    '-binpkg-request-signature -gpg-keepalive"',
                ),
            }
        )
        tmpdir = tempfile.mkdtemp()

        try:
            settings = playground.settings
            orig_full_path = os.path.join(tmpdir, "orig/")
            os.makedirs(orig_full_path)

            data = urandom(1048576)
            with open(os.path.join(orig_full_path, "data"), "wb") as f:
                f.write(data)

            binpkg_1 = gpkg(settings, "test", os.path.join(tmpdir, "test-1.gpkg.tar"))
            binpkg_1.compress(orig_full_path, {})

            with tarfile.open(os.path.join(tmpdir, "test-1.gpkg.tar"), "r") as tar_1:
                with tarfile.open(
                    os.path.join(tmpdir, "test-2.gpkg.tar"), "w"
                ) as tar_2:
                    for f in tar_1.getmembers():
                        if f.name == "Manifest":
                            data = io.BytesIO(tar_1.extractfile(f).read())
                            data_view = data.getbuffer()
                            data_view[-16:] = b"20a6d80ab0320fh9"
                            del data_view
                            tar_2.addfile(f, data)
                            data.close()
                        else:
                            tar_2.addfile(f, tar_1.extractfile(f))

            binpkg_2 = gpkg(settings, "test", os.path.join(tmpdir, "test-2.gpkg.tar"))

            self.assertRaises(
                DigestException, binpkg_2.decompress, os.path.join(tmpdir, "test")
            )
        finally:
            shutil.rmtree(tmpdir)
            playground.cleanup()
Example #29
	def _create_world(self, world, world_sets):
		#Create /var/lib/portage/world
		var_lib_portage = os.path.join(self.eroot, "var", "lib", "portage")
		os.makedirs(var_lib_portage)

		world_file = os.path.join(var_lib_portage, "world")
		world_set_file = os.path.join(var_lib_portage, "world_sets")

		with open(world_file, "w") as f:
			for atom in world:
				f.write("%s\n" % atom)

		with open(world_set_file, "w") as f:
			for atom in world_sets:
				f.write("%s\n" % atom)
Example #30
    def _create_world(self, world, world_sets):
        # Create /var/lib/portage/world
        var_lib_portage = os.path.join(self.eroot, "var", "lib", "portage")
        os.makedirs(var_lib_portage)

        world_file = os.path.join(var_lib_portage, "world")
        world_set_file = os.path.join(var_lib_portage, "world_sets")

        with open(world_file, "w") as f:
            for atom in world:
                f.write("%s\n" % atom)

        with open(world_set_file, "w") as f:
            for atom in world_sets:
                f.write("%s\n" % atom)
Example #31
	def testFakedbapi(self):
		packages = (
			("sys-apps/portage-2.1.10", {
				"EAPI"         : "2",
				"IUSE"         : "ipc doc",
				"repository"   : "gentoo",
				"SLOT"         : "0",
				"USE"          : "ipc missing-iuse",
			}),
			("virtual/package-manager-0", {
				"EAPI"         : "0",
				"repository"   : "gentoo",
				"SLOT"         : "0",
			}),
		)

		match_tests = (
			("sys-apps/portage:0[ipc]",             ["sys-apps/portage-2.1.10"]),
			("sys-apps/portage:0[-ipc]",            []),
			("sys-apps/portage:0[doc]",             []),
			("sys-apps/portage:0[-doc]",            ["sys-apps/portage-2.1.10"]),
			("sys-apps/portage:0",                  ["sys-apps/portage-2.1.10"]),
			("sys-apps/portage:0[missing-iuse]",    []),
			("sys-apps/portage:0[-missing-iuse]",   []),
			("sys-apps/portage:0::gentoo[ipc]",     ["sys-apps/portage-2.1.10"]),
			("sys-apps/portage:0::multilib[ipc]",   []),
			("virtual/package-manager",             ["virtual/package-manager-0"]),
		)

		tempdir = tempfile.mkdtemp()
		try:
			portdir = os.path.join(tempdir, "usr/portage")
			os.makedirs(portdir)
			env = {
				"PORTDIR": portdir,
			}
			fakedb = fakedbapi(settings=config(config_profile_path="",
				env=env, eprefix=tempdir))
			for cpv, metadata in packages:
				fakedb.cpv_inject(cpv, metadata=metadata)

			for atom, expected_result in match_tests:
				result = fakedb.match(atom)
				self.assertEqual(fakedb.match(atom), expected_result,
					"fakedb.match('%s') = %s != %s" %
					(atom, result, expected_result))
		finally:
			shutil.rmtree(tempdir)
Example #32
    def test_gpkg_missing_signature(self):
        if sys.version_info.major < 3:
            self.skipTest("Not support Python 2")

        playground = ResolverPlayground(
            user_config={
                "make.conf": (
                    'FEATURES="${FEATURES} binpkg-signing '
                    'binpkg-request-signature"',
                    'BINPKG_FORMAT="gpkg"',
                ),
            })
        tmpdir = tempfile.mkdtemp()

        try:
            settings = playground.settings
            gpg = GPG(settings)
            gpg.unlock()
            orig_full_path = os.path.join(tmpdir, "orig/")
            os.makedirs(orig_full_path)

            data = urandom(1048576)
            with open(os.path.join(orig_full_path, "data"), "wb") as f:
                f.write(data)

            binpkg_1 = gpkg(settings, "test",
                            os.path.join(tmpdir, "test-1.gpkg.tar"))
            binpkg_1.compress(orig_full_path, {})

            with tarfile.open(os.path.join(tmpdir, "test-1.gpkg.tar"),
                              "r") as tar_1:
                with tarfile.open(os.path.join(tmpdir, "test-2.gpkg.tar"),
                                  "w") as tar_2:
                    for f in tar_1.getmembers():
                        if f.name.endswith(".sig"):
                            pass
                        else:
                            tar_2.addfile(f, tar_1.extractfile(f))

            binpkg_2 = gpkg(settings, "test",
                            os.path.join(tmpdir, "test-2.gpkg.tar"))
            self.assertRaises(MissingSignature, binpkg_2.decompress,
                              os.path.join(tmpdir, "test"))

        finally:
            shutil.rmtree(tmpdir)
            playground.cleanup()
Example #33
    def test_gpkg_extra_files(self):
        if sys.version_info.major < 3:
            self.skipTest("Not support Python 2")

        playground = ResolverPlayground(
            user_config={
                "make.conf": (
                    'FEATURES="${FEATURES} -binpkg-signing '
                    '-binpkg-request-signature -gpg-keepalive"',
                ),
            }
        )
        tmpdir = tempfile.mkdtemp()

        try:
            settings = playground.settings
            orig_full_path = os.path.join(tmpdir, "orig/")
            os.makedirs(orig_full_path)

            data = urandom(1048576)
            with open(os.path.join(orig_full_path, "data"), "wb") as f:
                f.write(data)

            binpkg_1 = gpkg(settings, "test", os.path.join(tmpdir, "test-1.gpkg.tar"))
            binpkg_1.compress(orig_full_path, {})

            with tarfile.open(os.path.join(tmpdir, "test-1.gpkg.tar"), "r") as tar_1:
                with tarfile.open(
                    os.path.join(tmpdir, "test-2.gpkg.tar"), "w"
                ) as tar_2:
                    for f in tar_1.getmembers():
                        tar_2.addfile(f, tar_1.extractfile(f))
                    data_tarinfo = tarfile.TarInfo("data2")
                    data_tarinfo.size = len(data)
                    data2 = io.BytesIO(data)
                    tar_2.addfile(data_tarinfo, data2)
                    data2.close()

            binpkg_2 = gpkg(settings, "test", os.path.join(tmpdir, "test-2.gpkg.tar"))

            self.assertRaises(
                DigestException, binpkg_2.decompress, os.path.join(tmpdir, "test")
            )
        finally:
            shutil.rmtree(tmpdir)
            playground.cleanup()
Example #34
    def _create_installed(self, installed):
        for cpv in installed:
            a = Atom("=" + cpv)
            vdb_pkg_dir = os.path.join(self.vdbdir, a.cpv)
            try:
                os.makedirs(vdb_pkg_dir)
            except os.error:
                pass

            metadata = installed[cpv].copy()
            eapi = metadata.pop("EAPI", 0)
            lic = metadata.pop("LICENSE", "")
            properties = metadata.pop("PROPERTIES", "")
            slot = metadata.pop("SLOT", 0)
            keywords = metadata.pop("KEYWORDS", "~x86")
            iuse = metadata.pop("IUSE", "")
            use = metadata.pop("USE", "")
            depend = metadata.pop("DEPEND", "")
            rdepend = metadata.pop("RDEPEND", None)
            pdepend = metadata.pop("PDEPEND", None)
            required_use = metadata.pop("REQUIRED_USE", None)

            if metadata:
                raise ValueError(
                    "metadata of installed '%s' contains unknown keys: %s" %
                    (cpv, metadata.keys()))

            def write_key(key, value):
                f = open(os.path.join(vdb_pkg_dir, key), "w")
                f.write(str(value) + "\n")
                f.close()

            write_key("EAPI", eapi)
            write_key("LICENSE", lic)
            write_key("PROPERTIES", properties)
            write_key("SLOT", slot)
            write_key("KEYWORDS", keywords)
            write_key("IUSE", iuse)
            write_key("USE", use)
            write_key("DEPEND", depend)
            if rdepend is not None:
                write_key("RDEPEND", rdepend)
            if pdepend is not None:
                write_key("PDEPEND", pdepend)
            if required_use is not None:
                write_key("REQUIRED_USE", required_use)
Example #35
    def testFakedbapi(self):
        packages = (
            ("sys-apps/portage-2.1.10", {
                "EAPI": "2",
                "IUSE": "ipc doc",
                "repository": "gentoo",
                "SLOT": "0",
                "USE": "ipc missing-iuse",
            }),
            ("virtual/package-manager-0", {
                "EAPI": "0",
                "repository": "gentoo",
                "SLOT": "0",
            }),
        )

        match_tests = (
            ("sys-apps/portage:0[ipc]", ["sys-apps/portage-2.1.10"]),
            ("sys-apps/portage:0[-ipc]", []),
            ("sys-apps/portage:0[doc]", []),
            ("sys-apps/portage:0[-doc]", ["sys-apps/portage-2.1.10"]),
            ("sys-apps/portage:0", ["sys-apps/portage-2.1.10"]),
            ("sys-apps/portage:0[missing-iuse]", []),
            ("sys-apps/portage:0[-missing-iuse]", []),
            ("sys-apps/portage:0::gentoo[ipc]", ["sys-apps/portage-2.1.10"]),
            ("sys-apps/portage:0::multilib[ipc]", []),
            ("virtual/package-manager", ["virtual/package-manager-0"]),
        )

        tempdir = tempfile.mkdtemp()
        try:
            portdir = os.path.join(tempdir, "usr/portage")
            os.makedirs(portdir)
            env = {
                "PORTDIR": portdir,
            }
            fakedb = fakedbapi(settings=config(
                config_profile_path="", env=env, eprefix=tempdir))
            for cpv, metadata in packages:
                fakedb.cpv_inject(cpv, metadata=metadata)

            for atom, expected_result in match_tests:
                self.assertEqual(fakedb.match(atom), expected_result)
        finally:
            shutil.rmtree(tempdir)
Example #36
    def new(self, **kwargs):
        '''Do the initial clone of the repository'''
        if kwargs:
            self._kwargs(kwargs)
        try:
            if not os.path.exists(self.repo.location):
                os.makedirs(self.repo.location)
                self.logger(self.xterm_titles,
                            'Created new directory %s' % self.repo.location)
        except IOError:
            return (1, False)

        sync_uri = self.repo.sync_uri
        if sync_uri.startswith("file://"):
            sync_uri = sync_uri[6:]

        git_cmd_opts = ""
        if self.settings.get("PORTAGE_QUIET") == "1":
            git_cmd_opts += " --quiet"
        if self.repo.clone_depth is not None:
            if self.repo.clone_depth != 0:
                git_cmd_opts += " --depth %d" % self.repo.clone_depth
        elif self.repo.sync_depth is not None:
            if self.repo.sync_depth != 0:
                git_cmd_opts += " --depth %d" % self.repo.sync_depth
        else:
            # default
            git_cmd_opts += " --depth 1"
        if self.repo.module_specific_options.get('sync-git-clone-extra-opts'):
            git_cmd_opts += " %s" % self.repo.module_specific_options[
                'sync-git-clone-extra-opts']
        git_cmd = "%s clone%s %s ." % (self.bin_command, git_cmd_opts,
                                       portage._shell_quote(sync_uri))
        writemsg_level(git_cmd + "\n")

        exitcode = portage.process.spawn_bash(
            "cd %s ; exec %s" %
            (portage._shell_quote(self.repo.location), git_cmd),
            **self.spawn_kwargs)
        if exitcode != os.EX_OK:
            msg = "!!! git clone error in %s" % self.repo.location
            self.logger(self.xterm_titles, msg)
            writemsg_level(msg + "\n", level=logging.ERROR, noiselevel=-1)
            return (exitcode, False)
        return (os.EX_OK, True)
Example #37
    def test_gpkg_untrusted_signature(self):
        if sys.version_info.major < 3:
            self.skipTest("Not support Python 2")

        gpg_test_path = os.environ["PORTAGE_GNUPGHOME"]

        playground = ResolverPlayground(
            user_config={
                "make.conf": (
                    'FEATURES="${FEATURES} binpkg-signing '
                    'binpkg-request-signature"',
                    'BINPKG_FORMAT="gpkg"',
                    f'BINPKG_GPG_SIGNING_BASE_COMMAND="flock {gpg_test_path}/portage-binpkg-gpg.lock /usr/bin/gpg --sign --armor --batch --no-tty --yes --pinentry-mode loopback --passphrase GentooTest [PORTAGE_CONFIG]"',
                    'BINPKG_GPG_SIGNING_DIGEST="SHA512"',
                    f'BINPKG_GPG_SIGNING_GPG_HOME="{gpg_test_path}"',
                    'BINPKG_GPG_SIGNING_KEY="0x8812797DDF1DD192"',
                    'BINPKG_GPG_VERIFY_BASE_COMMAND="/usr/bin/gpg --verify --batch --no-tty --yes --no-auto-check-trustdb --status-fd 1 [PORTAGE_CONFIG] [SIGNATURE]"',
                    f'BINPKG_GPG_VERIFY_GPG_HOME="{gpg_test_path}"',
                ),
            })
        tmpdir = tempfile.mkdtemp()

        try:
            settings = playground.settings
            gpg = GPG(settings)
            gpg.unlock()
            orig_full_path = os.path.join(tmpdir, "orig/")
            os.makedirs(orig_full_path)

            data = urandom(1048576)
            with open(os.path.join(orig_full_path, "data"), "wb") as f:
                f.write(data)

            binpkg_1 = gpkg(settings, "test",
                            os.path.join(tmpdir, "test-1.gpkg.tar"))
            binpkg_1.compress(orig_full_path, {})

            binpkg_2 = gpkg(settings, "test",
                            os.path.join(tmpdir, "test-1.gpkg.tar"))
            self.assertRaises(InvalidSignature, binpkg_2.decompress,
                              os.path.join(tmpdir, "test"))

        finally:
            shutil.rmtree(tmpdir)
            playground.cleanup()
Example #38
    def test_gpkg_different_size_file(self):
        if sys.version_info.major < 3:
            self.skipTest("Not support Python 2")

        playground = ResolverPlayground(
            user_config={
                "make.conf": (
                    'FEATURES="${FEATURES} -binpkg-signing '
                    '-binpkg-request-signature -gpg-keepalive"',
                ),
            }
        )
        tmpdir = tempfile.mkdtemp()

        try:
            settings = playground.settings
            orig_full_path = os.path.join(tmpdir, "orig/")
            os.makedirs(orig_full_path)

            data = urandom(100)
            with open(os.path.join(orig_full_path, "data"), "wb") as f:
                f.write(data)

            binpkg_1 = gpkg(settings, "test", os.path.join(tmpdir, "test-1.gpkg.tar"))
            binpkg_1.compress(orig_full_path, {})

            with tarfile.open(os.path.join(tmpdir, "test-1.gpkg.tar"), "r") as tar_1:
                with tarfile.open(
                    os.path.join(tmpdir, "test-2.gpkg.tar"), "w"
                ) as tar_2:
                    for f in tar_1.getmembers():
                        tar_2.addfile(f, tar_1.extractfile(f))
                        tar_2.addfile(f, tar_1.extractfile(f))

            binpkg_2 = gpkg(settings, "test", os.path.join(tmpdir, "test-2.gpkg.tar"))

            self.assertRaises(
                InvalidBinaryPackageFormat,
                binpkg_2.decompress,
                os.path.join(tmpdir, "test"),
            )
        finally:
            shutil.rmtree(tmpdir)
            playground.cleanup()
Example #39
 def unpackinfo(self, mydest):
     """Unpacks all the files from the dataSegment into 'mydest'."""
     if not self.scan():
         return 0
     try:
         origdir = os.getcwd()
     except SystemExit as e:
         raise
     except:
         os.chdir("/")
         origdir = "/"
     a = open(
         _unicode_encode(self.file,
                         encoding=_encodings['fs'],
                         errors='strict'), 'rb')
     if not os.path.exists(mydest):
         os.makedirs(mydest)
     os.chdir(mydest)
     startpos = 0
     while ((startpos + 8) < self.indexsize):
         namelen = decodeint(self.index[startpos:startpos + 4])
         datapos = decodeint(self.index[startpos + 4 + namelen:startpos +
                                        8 + namelen])
         datalen = decodeint(self.index[startpos + 8 + namelen:startpos +
                                        12 + namelen])
         myname = self.index[startpos + 4:startpos + 4 + namelen]
         myname = _unicode_decode(myname,
                                  encoding=_encodings['repo.content'],
                                  errors='replace')
         dirname = os.path.dirname(myname)
         if dirname:
             if not os.path.exists(dirname):
                 os.makedirs(dirname)
         mydat = open(
             _unicode_encode(myname,
                             encoding=_encodings['fs'],
                             errors='strict'), 'wb')
         a.seek(self.datapos + datapos)
         mydat.write(a.read(datalen))
         mydat.close()
         startpos = startpos + namelen + 12
     a.close()
     os.chdir(origdir)
     return 1
Example #40
    def _create_ebuilds(self, ebuilds):
        for cpv in ebuilds:
            a = Atom("=" + cpv)
            ebuild_dir = os.path.join(self.portdir, a.cp)
            ebuild_path = os.path.join(ebuild_dir,
                                       a.cpv.split("/")[1] + ".ebuild")
            try:
                os.makedirs(ebuild_dir)
            except os.error:
                pass

            metadata = ebuilds[cpv].copy()
            eapi = metadata.pop("EAPI", 0)
            lic = metadata.pop("LICENSE", 0)
            properties = metadata.pop("PROPERTIES", "")
            slot = metadata.pop("SLOT", 0)
            keywords = metadata.pop("KEYWORDS", "x86")
            iuse = metadata.pop("IUSE", "")
            depend = metadata.pop("DEPEND", "")
            rdepend = metadata.pop("RDEPEND", None)
            pdepend = metadata.pop("PDEPEND", None)
            required_use = metadata.pop("REQUIRED_USE", None)

            if metadata:
                raise ValueError(
                    "metadata of ebuild '%s' contains unknown keys: %s" %
                    (cpv, metadata.keys()))

            f = open(ebuild_path, "w")
            f.write('EAPI="' + str(eapi) + '"\n')
            f.write('LICENSE="' + str(lic) + '"\n')
            f.write('PROPERTIES="' + str(properties) + '"\n')
            f.write('SLOT="' + str(slot) + '"\n')
            f.write('KEYWORDS="' + str(keywords) + '"\n')
            f.write('IUSE="' + str(iuse) + '"\n')
            f.write('DEPEND="' + str(depend) + '"\n')
            if rdepend is not None:
                f.write('RDEPEND="' + str(rdepend) + '"\n')
            if pdepend is not None:
                f.write('PDEPEND="' + str(pdepend) + '"\n')
            if required_use is not None:
                f.write('REQUIRED_USE="' + str(required_use) + '"\n')
            f.close()
Example #41
    def __init__(self, ebuilds={}, binpkgs={}, installed={}, profile={}, repo_configs={}, \
     user_config={}, sets={}, world=[], world_sets=[], distfiles={},
     targetroot=False, debug=False):
        """
		ebuilds: cpv -> metadata mapping simulating available ebuilds.
		installed: cpv -> metadata mapping simulating installed packages.
			If a metadata key is missing, it gets a default value.
		profile: settings defined by the profile.
		"""
        self.debug = debug
        self.eprefix = normalize_path(tempfile.mkdtemp())
        portage.const.EPREFIX = self.eprefix.rstrip(os.sep)

        self.eroot = self.eprefix + os.sep
        if targetroot:
            self.target_root = os.path.join(self.eroot, 'target_root')
        else:
            self.target_root = os.sep
        self.distdir = os.path.join(self.eroot, "var", "portage", "distfiles")
        self.pkgdir = os.path.join(self.eprefix, "pkgdir")
        self.vdbdir = os.path.join(self.eroot, "var/db/pkg")
        os.makedirs(self.vdbdir)

        if not debug:
            portage.util.noiselimit = -2

        self._repositories = {}
        #Make sure the main repo is always created
        self._get_repo_dir("test_repo")

        self._create_distfiles(distfiles)
        self._create_ebuilds(ebuilds)
        self._create_binpkgs(binpkgs)
        self._create_installed(installed)
        self._create_profile(ebuilds, installed, profile, repo_configs,
                             user_config, sets)
        self._create_world(world, world_sets)

        self.settings, self.trees = self._load_config()

        self._create_ebuild_manifests(ebuilds)

        portage.util.noiselimit = 0
Example #42
    def test_gpkg_update_metadata(self):
        if sys.version_info.major < 3:
            self.skipTest("Not support Python 2")
        playground = ResolverPlayground(
            user_config={
                "make.conf": ('BINPKG_COMPRESS="gzip"',),
            }
        )
        tmpdir = tempfile.mkdtemp()

        try:
            settings = playground.settings
            orig_full_path = os.path.join(tmpdir, "orig/")
            os.makedirs(orig_full_path)
            with open(os.path.join(orig_full_path, "test"), "wb") as test_file:
                test_file.write(urandom(1048576))

            gpkg_file_loc = os.path.join(tmpdir, "test.gpkg.tar")
            test_gpkg = gpkg(settings, "test", gpkg_file_loc)

            meta = {"test1": b"1234567890", "test2": b"abcdef"}

            test_gpkg.compress(os.path.join(tmpdir, "orig"), meta)

            meta_result = test_gpkg.get_metadata()
            self.assertEqual(meta, meta_result)

            meta_new = {"test3": b"0987654321", "test4": b"XXXXXXXX"}
            test_gpkg.update_metadata(meta_new)

            meta_result = test_gpkg.get_metadata()
            self.assertEqual(meta_new, meta_result)

            test_gpkg.decompress(os.path.join(tmpdir, "test"))
            r = compare_files(
                os.path.join(tmpdir, "orig/" + "test"),
                os.path.join(tmpdir, "test/" + "test"),
                skipped_types=("atime", "mtime", "ctime"),
            )
            self.assertEqual(r, ())
        finally:
            shutil.rmtree(tmpdir)
            playground.cleanup()
Example #43
    def unpackinfo(self, mydest):
        """Unpacks all the files from the dataSegment into 'mydest'."""
        if not self.scan():
            return 0
        mydest = normalize_path(mydest) + os.sep
        a = open(
            _unicode_encode(self.file, encoding=_encodings["fs"], errors="strict"), "rb"
        )
        if not os.path.exists(mydest):
            os.makedirs(mydest)
        startpos = 0
        while (startpos + 8) < self.indexsize:
            namelen = decodeint(self.index[startpos : startpos + 4])
            datapos = decodeint(
                self.index[startpos + 4 + namelen : startpos + 8 + namelen]
            )
            datalen = decodeint(
                self.index[startpos + 8 + namelen : startpos + 12 + namelen]
            )
            myname = self.index[startpos + 4 : startpos + 4 + namelen]
            myname = _unicode_decode(
                myname, encoding=_encodings["repo.content"], errors="replace"
            )
            filename = os.path.join(mydest, myname.lstrip(os.sep))
            filename = normalize_path(filename)
            if not filename.startswith(mydest):
                # myname contains invalid ../ component(s)
                continue
            dirname = os.path.dirname(filename)
            if dirname:
                if not os.path.exists(dirname):
                    os.makedirs(dirname)
            mydat = open(
                _unicode_encode(filename, encoding=_encodings["fs"], errors="strict"),
                "wb",
            )
            a.seek(self.datapos + datapos)
            mydat.write(a.read(datalen))
            mydat.close()
            startpos = startpos + namelen + 12
        a.close()
        return 1
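
The index walk in unpackinfo() reads repeated records of a 4-byte length, the entry name, then a 4-byte data offset and a 4-byte data length (decodeint appears to decode big-endian 32-bit integers). A self-contained sketch of that record layout, using struct instead of portage's own helpers; the sample entries are hypothetical:

import struct

def build_index(entries):
    # entries: iterable of (name_bytes, datapos, datalen)
    index = b""
    for name, datapos, datalen in entries:
        index += struct.pack(">I", len(name)) + name
        index += struct.pack(">I", datapos) + struct.pack(">I", datalen)
    return index

def walk_index(index):
    startpos = 0
    while (startpos + 8) < len(index):
        namelen = struct.unpack(">I", index[startpos:startpos + 4])[0]
        name = index[startpos + 4:startpos + 4 + namelen]
        datapos = struct.unpack(">I", index[startpos + 4 + namelen:startpos + 8 + namelen])[0]
        datalen = struct.unpack(">I", index[startpos + 8 + namelen:startpos + 12 + namelen])[0]
        yield name, datapos, datalen
        startpos += namelen + 12

index = build_index([(b"CATEGORY", 0, 9), (b"PF", 9, 7)])
print(list(walk_index(index)))  # [(b'CATEGORY', 0, 9), (b'PF', 9, 7)]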
Example #44
0
    def testFakedbapi(self):
        packages = (
            (
                "sys-apps/portage-2.1.10",
                {"EAPI": "2", "IUSE": "ipc doc", "repository": "gentoo", "SLOT": "0", "USE": "ipc missing-iuse"},
            ),
            ("virtual/package-manager-0", {"EAPI": "0", "repository": "gentoo", "SLOT": "0"}),
        )

        match_tests = (
            ("sys-apps/portage:0[ipc]", ["sys-apps/portage-2.1.10"]),
            ("sys-apps/portage:0[-ipc]", []),
            ("sys-apps/portage:0[doc]", []),
            ("sys-apps/portage:0[-doc]", ["sys-apps/portage-2.1.10"]),
            ("sys-apps/portage:0", ["sys-apps/portage-2.1.10"]),
            ("sys-apps/portage:0[missing-iuse]", []),
            ("sys-apps/portage:0[-missing-iuse]", []),
            ("sys-apps/portage:0::gentoo[ipc]", ["sys-apps/portage-2.1.10"]),
            ("sys-apps/portage:0::multilib[ipc]", []),
            ("virtual/package-manager", ["virtual/package-manager-0"]),
        )

        tempdir = tempfile.mkdtemp()
        try:
            test_repo = os.path.join(tempdir, "var", "repositories", "test_repo")
            os.makedirs(os.path.join(test_repo, "profiles"))
            with open(os.path.join(test_repo, "profiles", "repo_name"), "w") as f:
                f.write("test_repo")
            env = {"PORTAGE_REPOSITORIES": "[DEFAULT]\nmain-repo = test_repo\n[test_repo]\nlocation = %s" % test_repo}
            fakedb = fakedbapi(settings=config(config_profile_path="", env=env, eprefix=tempdir))
            for cpv, metadata in packages:
                fakedb.cpv_inject(cpv, metadata=metadata)

            for atom, expected_result in match_tests:
                result = fakedb.match(atom)
                self.assertEqual(
                    fakedb.match(atom),
                    expected_result,
                    "fakedb.match('%s') = %s != %s" % (atom, result, expected_result),
                )
        finally:
            shutil.rmtree(tempdir)
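
A minimal usage sketch for the fakedbapi calls exercised above, outside of the temporary-repository setup. It assumes portage is installed and able to load its normal configuration (passing settings=None falls back to the global config); the package, metadata, and atom values are hypothetical.

from portage.dbapi.virtual import fakedbapi

fakedb = fakedbapi(settings=None)
fakedb.cpv_inject(
    "dev-libs/example-1.0",
    metadata={"EAPI": "7", "SLOT": "0", "repository": "gentoo"},
)
# Plain dependency atoms match against the injected package list.
print(fakedb.cp_list("dev-libs/example"))  # ['dev-libs/example-1.0']
print(fakedb.match("dev-libs/example"))    # ['dev-libs/example-1.0']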
Example #45
0
    def BuildTmp(self, tmp_subdir):
        tmp_dir = mkdtemp()
        hooks_dir = tmp_dir + "/" + tmp_subdir
        os.makedirs(hooks_dir)

        f = open(hooks_dir + "/1-testhook", "w")
        f.write("#!/bin/bash\n")
        f.write('export hookonlytest="portage cannot see me!"\n')
        f.write("exit 0\n")
        f.close()

        f = open(hooks_dir + "/2-testhook", "w")
        f.write("#!/bin/bash\n")
        f.write(
            'if [[ "${hookonlytest}" != "" ]]; then echo "Unexpected hookonlytest value: ${hookonlytest}"; exit 1; fi\n'
        )
        f.write("exit 0\n")
        f.close()

        return tmp_dir
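
BuildTmp() above only writes the hook scripts; a self-contained sketch of how a test might also mark them executable and tear the directory down afterwards (the subdirectory name is hypothetical, and whether the executable bit is required depends on the hook runner):

import os
import shutil
import stat
import tempfile

tmp_dir = tempfile.mkdtemp()
hooks_dir = os.path.join(tmp_dir, "post-sync.d")  # hypothetical hook subdirectory name
os.makedirs(hooks_dir)

hook_path = os.path.join(hooks_dir, "1-testhook")
with open(hook_path, "w") as f:
    f.write("#!/bin/bash\nexit 0\n")

# Hook runners generally only execute scripts that carry the executable bit.
st = os.stat(hook_path)
os.chmod(hook_path, st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)

try:
    pass  # ... exercise the code under test against tmp_dir here ...
finally:
    shutil.rmtree(tmp_dir)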
Example #46
0
    def BuildTmp(self, tmp_subdir):
        tmp_dir = mkdtemp()
        hooks_dir = tmp_dir + '/' + tmp_subdir
        os.makedirs(hooks_dir)

        f = open(hooks_dir + '/1-testhook', 'w')
        f.write('#!/bin/bash\n')
        f.write('export hookonlytest="portage cannot see me!"\n')
        f.write('exit 0\n')
        f.close()

        f = open(hooks_dir + '/2-testhook', 'w')
        f.write('#!/bin/bash\n')
        f.write(
            'if [[ "${hookonlytest}" != "" ]]; then echo "Unexpected hookonlytest value: ${hookonlytest}"; exit 1; fi\n'
        )
        f.write('exit 0\n')
        f.close()

        return tmp_dir
Example #47
0
    def _create_ebuilds(self, ebuilds):
        for cpv in ebuilds:
            a = Atom("=" + cpv, allow_repo=True)
            repo = a.repo
            if repo is None:
                repo = "test_repo"

            metadata = ebuilds[cpv].copy()
            copyright_header = metadata.pop("COPYRIGHT_HEADER", None)
            eapi = metadata.pop("EAPI", "0")
            misc_content = metadata.pop("MISC_CONTENT", None)
            metadata.setdefault("DEPEND", "")
            metadata.setdefault("SLOT", "0")
            metadata.setdefault("KEYWORDS", "x86")
            metadata.setdefault("IUSE", "")

            unknown_keys = set(metadata).difference(
                portage.dbapi.dbapi._known_keys)
            if unknown_keys:
                raise ValueError(
                    "metadata of ebuild '%s' contains unknown keys: %s" %
                    (cpv, sorted(unknown_keys)))

            repo_dir = self._get_repo_dir(repo)
            ebuild_dir = os.path.join(repo_dir, a.cp)
            ebuild_path = os.path.join(ebuild_dir,
                                       a.cpv.split("/")[1] + ".ebuild")
            try:
                os.makedirs(ebuild_dir)
            except os.error:
                pass

            f = open(ebuild_path, "w")
            if copyright_header is not None:
                f.write(copyright_header)
            f.write('EAPI="%s"\n' % eapi)
            for k, v in metadata.items():
                f.write('%s="%s"\n' % (k, v))
            if misc_content is not None:
                f.write(misc_content)
            f.close()
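
A sketch of the kind of ebuilds mapping _create_ebuilds() above consumes: cpv keys (optionally with a ::repo suffix), metadata dicts whose EAPI/MISC_CONTENT/COPYRIGHT_HEADER entries are split out, and every remaining key written as a KEY="value" line. The entries below are hypothetical.

ebuilds = {
    "dev-libs/A-1": {
        "EAPI": "7",
        "KEYWORDS": "x86",
        "IUSE": "+foo",
        "RDEPEND": "dev-libs/B",
        # Raw ebuild text appended after the generated variable assignments.
        "MISC_CONTENT": "src_install() { :; }\n",
    },
    "dev-libs/B-1::test_repo": {"EAPI": "7"},
}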
Example #48
0
	def _create_ebuilds(self, ebuilds):
		for cpv in ebuilds:
			a = Atom("=" + cpv)
			ebuild_dir = os.path.join(self.portdir, a.cp)
			ebuild_path = os.path.join(ebuild_dir, a.cpv.split("/")[1] + ".ebuild")
			try:
				os.makedirs(ebuild_dir)
			except os.error:
				pass
			
			metadata = ebuilds[cpv].copy()
			eapi = metadata.pop("EAPI", 0)
			lic = metadata.pop("LICENSE", 0)
			properties = metadata.pop("PROPERTIES", "")
			slot = metadata.pop("SLOT", 0)
			keywords = metadata.pop("KEYWORDS", "x86")
			iuse = metadata.pop("IUSE", "")
			depend = metadata.pop("DEPEND", "")
			rdepend = metadata.pop("RDEPEND", None)
			pdepend = metadata.pop("PDEPEND", None)
			required_use = metadata.pop("REQUIRED_USE", None)

			if metadata:
				raise ValueError("metadata of ebuild '%s' contains unknown keys: %s" % (cpv, metadata.keys()))

			f = open(ebuild_path, "w")
			f.write('EAPI="' + str(eapi) + '"\n')
			f.write('LICENSE="' + str(lic) + '"\n')
			f.write('PROPERTIES="' + str(properties) + '"\n')
			f.write('SLOT="' + str(slot) + '"\n')
			f.write('KEYWORDS="' + str(keywords) + '"\n')
			f.write('IUSE="' + str(iuse) + '"\n')
			f.write('DEPEND="' + str(depend) + '"\n')
			if rdepend is not None:
				f.write('RDEPEND="' + str(rdepend) + '"\n')
			if pdepend is not None:
				f.write('PDEPEND="' + str(pdepend) + '"\n')
			if required_use is not None:
				f.write('REQUIRED_USE="' + str(required_use) + '"\n')
			f.close()
Example #49
0
def ensure_dirs(dir_path, **kwargs):
	"""Create a directory and call apply_permissions.
	Returns True if a directory is created or the permissions needed to be
	modified, and False otherwise.

	This function's handling of EEXIST errors makes it useful for atomic
	directory creation, in which multiple processes may be competing to
	create the same directory.
	"""

	created_dir = False

	try:
		os.makedirs(dir_path)
		created_dir = True
	except OSError as oe:
		func_call = "makedirs('%s')" % dir_path
		if oe.errno in (errno.EEXIST,):
			pass
		else:
			if os.path.isdir(dir_path):
				# NOTE: DragonFly raises EPERM for makedir('/')
				# and that is supposed to be ignored here.
				# Also, sometimes mkdir raises EISDIR on FreeBSD
				# and we want to ignore that too (bug #187518).
				pass
			elif oe.errno == errno.EPERM:
				raise OperationNotPermitted(func_call)
			elif oe.errno == errno.EACCES:
				raise PermissionDenied(func_call)
			elif oe.errno == errno.EROFS:
				raise ReadOnlyFileSystem(func_call)
			else:
				raise
	if kwargs:
		perms_modified = apply_permissions(dir_path, **kwargs)
	else:
		perms_modified = False
	return created_dir or perms_modified
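
A usage sketch for ensure_dirs(); per its docstring above, extra keyword arguments are forwarded to apply_permissions, so mode or ownership can be enforced in the same call (the path and expected return values are illustrative):

import os
import tempfile

from portage.util import ensure_dirs

path = os.path.join(tempfile.mkdtemp(), "a", "b", "c")  # hypothetical nested path
created = ensure_dirs(path, mode=0o755)        # creates missing parents, applies mode to path
created_again = ensure_dirs(path, mode=0o755)  # nothing to do the second time
print(created, created_again)                  # typically: True False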
Example #50
0
    def _get_repo_dir(self, repo):
        """
        Create the repo directory if needed.
        """
        if repo not in self._repositories:
            if repo == "test_repo":
                self._repositories["DEFAULT"] = {"main-repo": repo}

            repo_path = os.path.join(self.eroot, "var", "repositories", repo)
            self._repositories[repo] = {"location": repo_path}
            profile_path = os.path.join(repo_path, "profiles")

            try:
                os.makedirs(profile_path)
            except os.error:
                pass

            repo_name_file = os.path.join(profile_path, "repo_name")
            with open(repo_name_file, "w") as f:
                f.write("%s\n" % repo)

        return self._repositories[repo]["location"]
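
A self-contained sketch of the minimal repository skeleton _get_repo_dir() lays down — a location plus profiles/repo_name — together with the repos.conf-style PORTAGE_REPOSITORIES entry that can point portage at it (names and paths are illustrative):

import os
import tempfile

root = tempfile.mkdtemp()
repo = "test_repo"
repo_path = os.path.join(root, "var", "repositories", repo)
os.makedirs(os.path.join(repo_path, "profiles"))

with open(os.path.join(repo_path, "profiles", "repo_name"), "w") as f:
    f.write("%s\n" % repo)

# repos.conf-style configuration string pointing at the new repository.
portage_repositories = "[DEFAULT]\nmain-repo = %s\n[%s]\nlocation = %s\n" % (
    repo,
    repo,
    repo_path,
)
print(portage_repositories)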
Example #51
0
	def _get_repo_dir(self, repo):
		"""
		Create the repo directory if needed.
		"""
		if repo not in self._repositories:
			if repo == "test_repo":
				self._repositories["DEFAULT"] = {"main-repo": repo}

			repo_path = os.path.join(self.eroot, "var", "repositories", repo)
			self._repositories[repo] = {"location": repo_path}
			profile_path = os.path.join(repo_path, "profiles")

			try:
				os.makedirs(profile_path)
			except os.error:
				pass

			repo_name_file = os.path.join(profile_path, "repo_name")
			with open(repo_name_file, "w") as f:
				f.write("%s\n" % repo)

		return self._repositories[repo]["location"]
Example #52
0
    def test_filename_hash_layout_get_filenames(self):
        filename = DistfileName(
            "foo-1.tar.gz",
            digests=dict((algo, checksum_str(b"", hashname=algo))
                         for algo in MANIFEST2_HASH_DEFAULTS),
        )
        layouts = (
            FlatLayout(),
            FilenameHashLayout("SHA1", "4"),
            FilenameHashLayout("SHA1", "8"),
            FilenameHashLayout("SHA1", "8:16"),
            FilenameHashLayout("SHA1", "8:16:24"),
            ContentHashLayout("SHA512", "8:8:8"),
        )

        for layout in layouts:
            distdir = tempfile.mkdtemp()
            try:
                path = os.path.join(distdir, layout.get_path(filename))
                try:
                    os.makedirs(os.path.dirname(path))
                except OSError:
                    pass

                with open(path, "wb") as f:
                    pass

                file_list = list(layout.get_filenames(distdir))
                self.assertTrue(len(file_list) > 0)
                for filename_result in file_list:
                    if isinstance(filename_result, DistfileName):
                        self.assertTrue(
                            filename_result.digests_equal(filename))
                    else:
                        self.assertEqual(filename_result, str(filename))
            finally:
                shutil.rmtree(distdir)
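
The layouts above spread distfiles across hashed subdirectories. A self-contained sketch of the general idea behind a FilenameHashLayout-style path: hash the file name, then consume leading bits of the digest, per the colon-separated cutoff string, as nested directory names. This illustrates the scheme rather than reproducing portage's exact implementation:

import hashlib
import os

def hashed_path(filename, algo="sha1", cutoffs="8:16"):
    # Hash the file name, then peel off cutoff-sized bit prefixes
    # (4 bits per hex character) as successive directory components.
    digest = hashlib.new(algo, filename.encode("utf-8")).hexdigest()
    parts = []
    offset = 0
    for bits in (int(x) for x in cutoffs.split(":")):
        nchars = bits // 4
        parts.append(digest[offset:offset + nchars])
        offset += nchars
    parts.append(filename)
    return os.path.join(*parts)

print(hashed_path("foo-1.tar.gz"))  # e.g. '<2 hex chars>/<4 hex chars>/foo-1.tar.gz'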
Example #53
0
    def _create_installed(self, installed):
        for cpv in installed:
            a = Atom("=" + cpv, allow_repo=True)
            repo = a.repo
            if repo is None:
                repo = "test_repo"

            vdb_pkg_dir = os.path.join(self.vdbdir, a.cpv)
            try:
                os.makedirs(vdb_pkg_dir)
            except os.error:
                pass

            metadata = installed[cpv].copy()
            metadata.setdefault("SLOT", "0")
            metadata.setdefault("BUILD_TIME", "0")
            metadata.setdefault("COUNTER", "0")
            metadata.setdefault("KEYWORDS", "~x86")

            unknown_keys = set(metadata).difference(
                portage.dbapi.dbapi._known_keys)
            unknown_keys.discard("BUILD_TIME")
            unknown_keys.discard("BUILD_ID")
            unknown_keys.discard("COUNTER")
            unknown_keys.discard("repository")
            unknown_keys.discard("USE")
            unknown_keys.discard("PROVIDES")
            unknown_keys.discard("REQUIRES")
            if unknown_keys:
                raise ValueError(
                    "metadata of installed '%s' contains unknown keys: %s" %
                    (cpv, sorted(unknown_keys)))

            metadata["repository"] = repo
            for k, v in metadata.items():
                with open(os.path.join(vdb_pkg_dir, k), "w") as f:
                    f.write("%s\n" % v)
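
A sketch of the installed mapping _create_installed() above expects; each metadata key becomes a one-line file under <vdbdir>/<cpv>/, plus a generated repository file. The package entries are hypothetical.

installed = {
    "dev-libs/A-1": {
        "EAPI": "7",
        "SLOT": "0",
        "KEYWORDS": "x86",
        "USE": "foo",
        "RDEPEND": "dev-libs/B",
    },
    "dev-libs/B-1::test_repo": {"EAPI": "7", "SLOT": "0"},
}
# Per the loop above this produces, e.g.:
#   <vdbdir>/dev-libs/A-1/SLOT        -> "0\n"
#   <vdbdir>/dev-libs/A-1/repository  -> "test_repo\n"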
Example #54
0
    def __init__(self,
                 ebuilds={},
                 installed={},
                 profile={},
                 user_config={},
                 sets={},
                 world=[],
                 debug=False):
        """
        ebuilds: cpv -> metadata mapping simulating available ebuilds.
        installed: cpv -> metadata mapping simulating installed packages.
                If a metadata key is missing, it gets a default value.
        profile: settings defined by the profile.
        """
        self.debug = debug
        self.root = "/"
        self.eprefix = tempfile.mkdtemp()
        self.eroot = self.root + self.eprefix.lstrip(os.sep) + os.sep
        self.portdir = os.path.join(self.eroot, "usr/portage")
        self.vdbdir = os.path.join(self.eroot, "var/db/pkg")
        os.makedirs(self.portdir)
        os.makedirs(self.vdbdir)

        if not debug:
            portage.util.noiselimit = -2

        self._create_ebuilds(ebuilds)
        self._create_installed(installed)
        self._create_profile(ebuilds, installed, profile, user_config, sets)
        self._create_world(world)

        self.settings, self.trees = self._load_config()

        self._create_ebuild_manifests(ebuilds)

        portage.util.noiselimit = 0
Example #55
0
    def _get_repo_dir(self, repo):
        """
        Create the repo directory if needed.
        """
        if repo not in self.repo_dirs:
            if repo == "test_repo":
                repo_path = self.portdir
            else:
                repo_path = os.path.join(self.eroot, "usr", "local", repo)

            self.repo_dirs[repo] = repo_path
            profile_path = os.path.join(repo_path, "profiles")

            try:
                os.makedirs(profile_path)
            except os.error:
                pass

            repo_name_file = os.path.join(profile_path, "repo_name")
            f = open(repo_name_file, "w")
            f.write("%s\n" % repo)
            f.close()

        return self.repo_dirs[repo]
Example #56
0
    def _create_profile(self, ebuilds, eclasses, installed, profile,
                        repo_configs, user_config, sets):

        user_config_dir = os.path.join(self.eroot, USER_CONFIG_PATH)

        try:
            os.makedirs(user_config_dir)
        except os.error:
            pass

        for repo in self._repositories:
            if repo == "DEFAULT":
                continue

            repo_dir = self._get_repo_dir(repo)
            profile_dir = os.path.join(repo_dir, "profiles")
            metadata_dir = os.path.join(repo_dir, "metadata")
            os.makedirs(metadata_dir)

            # Create $REPO/profiles/categories
            categories = set()
            for cpv in ebuilds:
                ebuilds_repo = Atom("=" + cpv, allow_repo=True).repo
                if ebuilds_repo is None:
                    ebuilds_repo = "test_repo"
                if ebuilds_repo == repo:
                    categories.add(catsplit(cpv)[0])

            categories_file = os.path.join(profile_dir, "categories")
            with open(categories_file, "w") as f:
                for cat in categories:
                    f.write(cat + "\n")

            # Create $REPO/profiles/license_groups
            license_file = os.path.join(profile_dir, "license_groups")
            with open(license_file, "w") as f:
                f.write("EULA TEST\n")

            repo_config = repo_configs.get(repo)
            if repo_config:
                for config_file, lines in repo_config.items():
                    if config_file not in self.config_files and not any(
                            fnmatch.fnmatch(config_file, os.path.join(x, "*"))
                            for x in self.config_files):
                        raise ValueError("Unknown config file: '%s'" %
                                         config_file)

                    if config_file in ("layout.conf", ):
                        file_name = os.path.join(repo_dir, "metadata",
                                                 config_file)
                    else:
                        file_name = os.path.join(profile_dir, config_file)
                        if "/" in config_file and not os.path.isdir(
                                os.path.dirname(file_name)):
                            os.makedirs(os.path.dirname(file_name))
                    with open(file_name, "w") as f:
                        for line in lines:
                            f.write("%s\n" % line)
                        # Temporarily write empty value of masters until it becomes default.
                        # TODO: Delete all references to "# use implicit masters" when empty value becomes default.
                        if config_file == "layout.conf" and not any(
                                line.startswith(("masters =",
                                                 "# use implicit masters"))
                                for line in lines):
                            f.write("masters =\n")

            # Create $profile_dir/eclass (we fail to digest the ebuilds if it's not there)
            eclass_dir = os.path.join(repo_dir, "eclass")
            os.makedirs(eclass_dir)

            for eclass_name, eclass_content in eclasses.items():
                with open(
                        os.path.join(eclass_dir,
                                     "{}.eclass".format(eclass_name)),
                        "wt") as f:
                    if isinstance(eclass_content, str):
                        eclass_content = [eclass_content]
                    for line in eclass_content:
                        f.write("{}\n".format(line))

            # Temporarily write empty value of masters until it becomes default.
            if not repo_config or "layout.conf" not in repo_config:
                layout_conf_path = os.path.join(repo_dir, "metadata",
                                                "layout.conf")
                with open(layout_conf_path, "w") as f:
                    f.write("masters =\n")

            if repo == "test_repo":
                # Create a minimal profile in /var/db/repos/gentoo
                sub_profile_dir = os.path.join(profile_dir, "default", "linux",
                                               "x86", "test_profile")
                os.makedirs(sub_profile_dir)

                if not (profile and "eapi" in profile):
                    eapi_file = os.path.join(sub_profile_dir, "eapi")
                    with open(eapi_file, "w") as f:
                        f.write("0\n")

                make_defaults_file = os.path.join(sub_profile_dir,
                                                  "make.defaults")
                with open(make_defaults_file, "w") as f:
                    f.write('ARCH="x86"\n')
                    f.write('ACCEPT_KEYWORDS="x86"\n')

                use_force_file = os.path.join(sub_profile_dir, "use.force")
                with open(use_force_file, "w") as f:
                    f.write("x86\n")

                parent_file = os.path.join(sub_profile_dir, "parent")
                with open(parent_file, "w") as f:
                    f.write("..\n")

                if profile:
                    for config_file, lines in profile.items():
                        if config_file not in self.config_files:
                            raise ValueError("Unknown config file: '%s'" %
                                             config_file)

                        file_name = os.path.join(sub_profile_dir, config_file)
                        with open(file_name, "w") as f:
                            for line in lines:
                                f.write("%s\n" % line)

                # Create profile symlink
                os.symlink(sub_profile_dir,
                           os.path.join(user_config_dir, "make.profile"))

        gpg_test_path = os.environ["PORTAGE_GNUPGHOME"]

        make_conf = {
            "ACCEPT_KEYWORDS": "x86",
            "BINPKG_GPG_SIGNING_BASE_COMMAND":
            f"flock {gpg_test_path}/portage-binpkg-gpg.lock /usr/bin/gpg --sign --armor --yes --pinentry-mode loopback --passphrase GentooTest [PORTAGE_CONFIG]",
            "BINPKG_GPG_SIGNING_GPG_HOME": gpg_test_path,
            "BINPKG_GPG_SIGNING_KEY": "0x5D90EA06352177F6",
            "BINPKG_GPG_VERIFY_GPG_HOME": gpg_test_path,
            "CLEAN_DELAY": "0",
            "DISTDIR": self.distdir,
            "EMERGE_WARNING_DELAY": "0",
            "FEATURES": "${FEATURES} binpkg-signing binpkg-request-signature "
            "gpg-keepalive",
            "PKGDIR": self.pkgdir,
            "PORTAGE_INST_GID": str(portage.data.portage_gid),
            "PORTAGE_INST_UID": str(portage.data.portage_uid),
            "PORTAGE_TMPDIR": os.path.join(self.eroot, "var/tmp"),
        }

        if os.environ.get("NOCOLOR"):
            make_conf["NOCOLOR"] = os.environ["NOCOLOR"]

        # Pass along PORTAGE_USERNAME and PORTAGE_GRPNAME since they
        # need to be inherited by ebuild subprocesses.
        if "PORTAGE_USERNAME" in os.environ:
            make_conf["PORTAGE_USERNAME"] = os.environ["PORTAGE_USERNAME"]
        if "PORTAGE_GRPNAME" in os.environ:
            make_conf["PORTAGE_GRPNAME"] = os.environ["PORTAGE_GRPNAME"]

        make_conf_lines = []
        for k_v in make_conf.items():
            make_conf_lines.append('%s="%s"' % k_v)

        if "make.conf" in user_config:
            make_conf_lines.extend(user_config["make.conf"])

        if not portage.process.sandbox_capable or os.environ.get(
                "SANDBOX_ON") == "1":
            # avoid problems from nested sandbox instances
            make_conf_lines.append(
                'FEATURES="${FEATURES} -sandbox -usersandbox"')

        configs = user_config.copy()
        configs["make.conf"] = make_conf_lines

        for config_file, lines in configs.items():
            if config_file not in self.config_files:
                raise ValueError("Unknown config file: '%s'" % config_file)

            file_name = os.path.join(user_config_dir, config_file)
            with open(file_name, "w") as f:
                for line in lines:
                    f.write("%s\n" % line)

        # Create /usr/share/portage/config/make.globals
        make_globals_path = os.path.join(self.eroot,
                                         GLOBAL_CONFIG_PATH.lstrip(os.sep),
                                         "make.globals")
        ensure_dirs(os.path.dirname(make_globals_path))
        os.symlink(os.path.join(cnf_path, "make.globals"), make_globals_path)

        # Create /usr/share/portage/config/sets/portage.conf
        default_sets_conf_dir = os.path.join(self.eroot,
                                             "usr/share/portage/config/sets")

        try:
            os.makedirs(default_sets_conf_dir)
        except os.error:
            pass

        provided_sets_portage_conf = os.path.join(cnf_path, "sets",
                                                  "portage.conf")
        os.symlink(
            provided_sets_portage_conf,
            os.path.join(default_sets_conf_dir, "portage.conf"),
        )

        set_config_dir = os.path.join(user_config_dir, "sets")

        try:
            os.makedirs(set_config_dir)
        except os.error:
            pass

        for sets_file, lines in sets.items():
            file_name = os.path.join(set_config_dir, sets_file)
            with open(file_name, "w") as f:
                for line in lines:
                    f.write("%s\n" % line)

        if cnf_path_repoman is not None:
            # Create /usr/share/repoman
            repoman_share_dir = os.path.join(self.eroot, "usr", "share",
                                             "repoman")
            os.symlink(cnf_path_repoman, repoman_share_dir)
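
A self-contained sketch of the make.conf assembly pattern used above: a dict of defaults rendered as KEY="value" lines, with any caller-supplied make.conf lines appended verbatim (the values and output path are illustrative):

make_conf = {
    "ACCEPT_KEYWORDS": "x86",
    "CLEAN_DELAY": "0",
    "EMERGE_WARNING_DELAY": "0",
}
user_lines = ['BINPKG_COMPRESS="gzip"', 'FEATURES="${FEATURES} -news"']

# Render the defaults, then append user-supplied lines verbatim.
make_conf_lines = ['%s="%s"' % kv for kv in make_conf.items()]
make_conf_lines.extend(user_lines)

with open("make.conf.example", "w") as f:  # hypothetical output path
    for line in make_conf_lines:
        f.write("%s\n" % line)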
Example #57
0
    def _create_distfiles(self, distfiles):
        os.makedirs(self.distdir)
        for k, v in distfiles.items():
            with open(os.path.join(self.distdir, k), "wb") as f:
                f.write(v)
Example #58
0
    def __init__(
        self,
        ebuilds={},
        binpkgs={},
        installed={},
        profile={},
        repo_configs={},
        user_config={},
        sets={},
        world=[],
        world_sets=[],
        distfiles={},
        eclasses={},
        eprefix=None,
        targetroot=False,
        debug=False,
    ):
        """
        ebuilds: cpv -> metadata mapping simulating available ebuilds.
        installed: cpv -> metadata mapping simulating installed packages.
                If a metadata key is missing, it gets a default value.
        profile: settings defined by the profile.
        """

        self.debug = debug
        if eprefix is None:
            self.eprefix = normalize_path(tempfile.mkdtemp())

            # EPREFIX/bin is used by fake 'true' binaries; real binaries go into EPREFIX/usr/bin
            eubin = os.path.join(self.eprefix, "usr", "bin")
            ensure_dirs(eubin)
            for x in self.portage_bin:
                os.symlink(os.path.join(PORTAGE_BIN_PATH, x),
                           os.path.join(eubin, x))

            eusbin = os.path.join(self.eprefix, "usr", "sbin")
            ensure_dirs(eusbin)
            for x in self.portage_sbin:
                os.symlink(os.path.join(PORTAGE_BIN_PATH, x),
                           os.path.join(eusbin, x))

            essential_binaries = (
                "awk",
                "basename",
                "bzip2",
                "cat",
                "chgrp",
                "chmod",
                "chown",
                "comm",
                "cp",
                "egrep",
                "env",
                "find",
                "flock",
                "grep",
                "head",
                "install",
                "ln",
                "mkdir",
                "mkfifo",
                "mktemp",
                "mv",
                "readlink",
                "rm",
                "sed",
                "sort",
                "tar",
                "tr",
                "uname",
                "uniq",
                "xargs",
                "zstd",
            )
            # Exclude internal wrappers from PATH lookup.
            orig_path = os.environ["PATH"]
            included_paths = []
            for path in orig_path.split(":"):
                if path and not fnmatch.fnmatch(path,
                                                "*/portage/*/ebuild-helpers*"):
                    included_paths.append(path)
            try:
                os.environ["PATH"] = ":".join(included_paths)
                for x in essential_binaries:
                    path = find_binary(x)
                    if path is None:
                        raise portage.exception.CommandNotFound(x)
                    os.symlink(path, os.path.join(eubin, x))
            finally:
                os.environ["PATH"] = orig_path
        else:
            self.eprefix = normalize_path(eprefix)

        # Tests may override portage.const.EPREFIX in order to
        # simulate a prefix installation. It's reasonable to do
        # this because tests should be self-contained such that
        # the "real" value of portage.const.EPREFIX is entirely
        # irrelevant (see bug #492932).
        self._orig_eprefix = portage.const.EPREFIX
        portage.const.EPREFIX = self.eprefix.rstrip(os.sep)

        self.eroot = self.eprefix + os.sep
        if targetroot:
            self.target_root = os.path.join(self.eroot, "target_root")
        else:
            self.target_root = os.sep
        self.distdir = os.path.join(self.eroot, "var", "portage", "distfiles")
        self.pkgdir = os.path.join(self.eprefix, "pkgdir")
        self.vdbdir = os.path.join(self.eroot, "var/db/pkg")
        os.makedirs(self.vdbdir)

        if not debug:
            portage.util.noiselimit = -2

        self._repositories = {}
        # Make sure the main repo is always created
        self._get_repo_dir("test_repo")

        self._create_distfiles(distfiles)
        self._create_ebuilds(ebuilds)
        self._create_installed(installed)
        self._create_profile(ebuilds, eclasses, installed, profile,
                             repo_configs, user_config, sets)
        self._create_world(world, world_sets)

        self.settings, self.trees = self._load_config()

        self.gpg = None
        self._create_binpkgs(binpkgs)
        self._create_ebuild_manifests(ebuilds)

        portage.util.noiselimit = 0
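
A usage sketch for the constructor above. It assumes it runs from a portage source checkout where portage.tests is importable and with whatever environment the test runner normally exports (the newer variants, for example, read PORTAGE_GNUPGHOME); the ebuild and make.conf contents are hypothetical.

from portage.tests.resolver.ResolverPlayground import ResolverPlayground

playground = ResolverPlayground(
    ebuilds={"dev-libs/A-1": {"EAPI": "7", "KEYWORDS": "x86"}},
    installed={"dev-libs/A-1": {"EAPI": "7", "KEYWORDS": "x86"}},
    user_config={"make.conf": ('FEATURES="-sandbox -usersandbox"',)},
)
try:
    # The playground exposes the config it built for the temporary EPREFIX.
    settings = playground.settings
    print(settings["EPREFIX"])
finally:
    # Removes the temporary EPREFIX tree again.
    playground.cleanup()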
Example #59
0
    async def _async_test_simple(
        self,
        playground,
        metadata_xml_files,
        profiles,
        profile,
        licenses,
        arch_list,
        use_desc,
        metadata_xsd,
        copyright_header,
        debug,
    ):
        settings = playground.settings
        eprefix = settings["EPREFIX"]
        eroot = settings["EROOT"]
        portdb = playground.trees[playground.eroot]["porttree"].dbapi
        homedir = os.path.join(eroot, "home")
        distdir = os.path.join(eprefix, "distdir")
        test_repo_location = settings.repositories["test_repo"].location
        profiles_dir = os.path.join(test_repo_location, "profiles")
        license_dir = os.path.join(test_repo_location, "licenses")

        repoman_cmd = (portage._python_interpreter, "-b", "-Wd",
                       os.path.join(self.bindir, "repoman"))

        git_binary = find_binary("git")
        git_cmd = (git_binary, )

        cp_binary = find_binary("cp")
        self.assertEqual(cp_binary is None, False, "cp command not found")
        cp_cmd = (cp_binary, )

        test_ebuild = portdb.findname("dev-libs/A-1")
        self.assertFalse(test_ebuild is None)

        committer_name = "Gentoo Dev"
        committer_email = "*****@*****.**"
        expected_warnings = {
            "returncode": 0,
            "warns": {
                "variable.phase": [
                    "dev-libs/C/C-0.ebuild: line 15: phase pkg_preinst: EAPI 7: variable A: Forbidden reference to variable specified by PMS",
                    "dev-libs/C/C-0.ebuild: line 15: phase pkg_preinst: EAPI 7: variable BROOT: Forbidden reference to variable specified by PMS",
                ]
            },
        }

        git_test = (
            ("", RepomanRun(args=["--version"])),
            ("", RepomanRun(args=["manifest"])),
            ("", git_cmd + (
                "config",
                "--global",
                "user.name",
                committer_name,
            )),
            ("", git_cmd + (
                "config",
                "--global",
                "user.email",
                committer_email,
            )),
            ("", git_cmd + ("init-db", )),
            ("", git_cmd + ("add", ".")),
            ("", git_cmd + ("commit", "-a", "-m", "add whole repo")),
            ("", RepomanRun(args=["full", "-d"], expected=expected_warnings)),
            ("",
             RepomanRun(args=[
                 "full", "--include-profiles", "default/linux/x86/test_profile"
             ],
                        expected=expected_warnings)),
            ("", cp_cmd + (test_ebuild, test_ebuild[:-8] + "2.ebuild")),
            ("", git_cmd + ("add", test_ebuild[:-8] + "2.ebuild")),
            ("",
             RepomanRun(args=["commit", "-m", "cat/pkg: bump to version 2"],
                        expected=expected_warnings)),
            ("", cp_cmd + (test_ebuild, test_ebuild[:-8] + "3.ebuild")),
            ("", git_cmd + ("add", test_ebuild[:-8] + "3.ebuild")),
            ("dev-libs",
             RepomanRun(args=["commit", "-m", "cat/pkg: bump to version 3"],
                        expected=expected_warnings)),
            ("", cp_cmd + (test_ebuild, test_ebuild[:-8] + "4.ebuild")),
            ("", git_cmd + ("add", test_ebuild[:-8] + "4.ebuild")),
            ("dev-libs/A",
             RepomanRun(args=["commit", "-m", "cat/pkg: bump to version 4"])),
        )

        env = {
            "PORTAGE_OVERRIDE_EPREFIX":
            eprefix,
            "DISTDIR":
            distdir,
            "GENTOO_COMMITTER_NAME":
            committer_name,
            "GENTOO_COMMITTER_EMAIL":
            committer_email,
            "HOME":
            homedir,
            "PATH":
            os.environ["PATH"],
            "PORTAGE_GRPNAME":
            os.environ["PORTAGE_GRPNAME"],
            "PORTAGE_USERNAME":
            os.environ["PORTAGE_USERNAME"],
            "PORTAGE_REPOSITORIES":
            settings.repositories.config_string(),
            "PYTHONDONTWRITEBYTECODE":
            os.environ.get("PYTHONDONTWRITEBYTECODE", ""),
        }

        if os.environ.get("SANDBOX_ON") == "1":
            # avoid problems from nested sandbox instances
            env["FEATURES"] = "-sandbox -usersandbox"

        dirs = [homedir, license_dir, profiles_dir, distdir]
        try:
            for d in dirs:
                ensure_dirs(d)
            with open(os.path.join(test_repo_location, "skel.ChangeLog"),
                      'w') as f:
                f.write(copyright_header)
            with open(os.path.join(profiles_dir, "profiles.desc"), 'w') as f:
                for x in profiles:
                    f.write("%s %s %s\n" % x)

            # ResolverPlayground only created the first profile,
            # so create the remaining ones.
            for x in profiles[1:]:
                sub_profile_dir = os.path.join(profiles_dir, x[1])
                ensure_dirs(sub_profile_dir)
                for config_file, lines in profile.items():
                    file_name = os.path.join(sub_profile_dir, config_file)
                    with open(file_name, "w") as f:
                        for line in lines:
                            f.write("%s\n" % line)

            for x in licenses:
                open(os.path.join(license_dir, x), 'wb').close()
            with open(os.path.join(profiles_dir, "arch.list"), 'w') as f:
                for x in arch_list:
                    f.write("%s\n" % x)
            with open(os.path.join(profiles_dir, "use.desc"), 'w') as f:
                for k, v in use_desc:
                    f.write("%s - %s\n" % (k, v))
            for cp, xml_data in metadata_xml_files:
                with open(os.path.join(test_repo_location, cp, "metadata.xml"),
                          'w') as f:
                    f.write(playground.metadata_xml_template % xml_data)
            # Use a symlink to test_repo, in order to trigger bugs
            # involving canonical vs. non-canonical paths.
            test_repo_symlink = os.path.join(eroot, "test_repo_symlink")
            os.symlink(test_repo_location, test_repo_symlink)
            metadata_xsd_dest = os.path.join(
                test_repo_location, 'metadata/xml-schema/metadata.xsd')
            os.makedirs(os.path.dirname(metadata_xsd_dest))
            os.symlink(metadata_xsd, metadata_xsd_dest)

            if debug:
                # The subprocess inherits both stdout and stderr, for
                # debugging purposes.
                stdout = None
            else:
                # The subprocess inherits stderr so that any warnings
                # triggered by python -Wd will be visible.
                stdout = subprocess.PIPE

            for cwd in ("", "dev-libs", "dev-libs/A", "dev-libs/B",
                        "dev-libs/C"):
                abs_cwd = os.path.join(test_repo_symlink, cwd)

                proc = await asyncio.create_subprocess_exec(*(repoman_cmd +
                                                              ("full", )),
                                                            env=env,
                                                            stderr=None,
                                                            stdout=stdout,
                                                            cwd=abs_cwd)

                if debug:
                    await proc.wait()
                else:
                    output, _err = await proc.communicate()
                    await proc.wait()
                    if proc.returncode != os.EX_OK:
                        portage.writemsg(output)

                self.assertEqual(os.EX_OK, proc.returncode,
                                 "repoman failed in %s" % (cwd, ))

            if git_binary is not None:
                for cwd, cmd in git_test:
                    abs_cwd = os.path.join(test_repo_symlink, cwd)
                    if isinstance(cmd, RepomanRun):
                        cmd.cwd = abs_cwd
                        cmd.env = env
                        cmd.debug = debug
                        await cmd.run()
                        if cmd.result[
                                "result"] != cmd.expected and cmd.result.get(
                                    "stdio"):
                            portage.writemsg(cmd.result["stdio"])
                        try:
                            self.assertEqual(cmd.result["result"],
                                             cmd.expected)
                        except Exception:
                            print(cmd.result["result"],
                                  file=sys.stderr,
                                  flush=True)
                            raise
                        continue

                    proc = await asyncio.create_subprocess_exec(*cmd,
                                                                env=env,
                                                                stderr=None,
                                                                stdout=stdout,
                                                                cwd=abs_cwd)

                    if debug:
                        await proc.wait()
                    else:
                        output, _err = await proc.communicate()
                        await proc.wait()
                        if proc.returncode != os.EX_OK:
                            portage.writemsg(output)

                    self.assertEqual(
                        os.EX_OK,
                        proc.returncode,
                        "%s failed in %s" % (
                            cmd,
                            cwd,
                        ),
                    )
        finally:
            playground.cleanup()
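
The repoman and git invocations above all follow the same asyncio subprocess pattern: spawn, optionally capture stdout, and check the return code. A self-contained sketch of that pattern with a stand-in command (the command and working directory are hypothetical):

import asyncio
import os
import subprocess

async def run_checked(cmd, cwd, env=None, debug=False):
    # Inherit stdout when debugging, otherwise capture it for error reporting.
    stdout = None if debug else subprocess.PIPE
    proc = await asyncio.create_subprocess_exec(
        *cmd, cwd=cwd, env=env, stderr=None, stdout=stdout
    )
    output, _err = await proc.communicate()
    if proc.returncode != os.EX_OK and output:
        print(output.decode(errors="replace"), end="")
    return proc.returncode

returncode = asyncio.run(run_checked(("true",), cwd="/"))
assert returncode == os.EX_OK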
Example #60
0
    def _create_profile(self, ebuilds, installed, profile, repo_configs,
                        user_config, sets):

        user_config_dir = os.path.join(self.eroot, USER_CONFIG_PATH)

        try:
            os.makedirs(user_config_dir)
        except os.error:
            pass

        for repo in self.repo_dirs:
            repo_dir = self._get_repo_dir(repo)
            profile_dir = os.path.join(self._get_repo_dir(repo), "profiles")
            metadata_dir = os.path.join(repo_dir, "metadata")
            os.makedirs(metadata_dir)

            #Create $REPO/profiles/categories
            categories = set()
            for cpv in ebuilds:
                ebuilds_repo = Atom("=" + cpv, allow_repo=True).repo
                if ebuilds_repo is None:
                    ebuilds_repo = "test_repo"
                if ebuilds_repo == repo:
                    categories.add(catsplit(cpv)[0])

            categories_file = os.path.join(profile_dir, "categories")
            f = open(categories_file, "w")
            for cat in categories:
                f.write(cat + "\n")
            f.close()

            #Create $REPO/profiles/license_groups
            license_file = os.path.join(profile_dir, "license_groups")
            f = open(license_file, "w")
            f.write("EULA TEST\n")
            f.close()

            repo_config = repo_configs.get(repo)
            if repo_config:
                for config_file, lines in repo_config.items():
                    if config_file not in self.config_files:
                        raise ValueError("Unknown config file: '%s'" %
                                         config_file)

                    if config_file in ("layout.conf", ):
                        file_name = os.path.join(repo_dir, "metadata",
                                                 config_file)
                    else:
                        file_name = os.path.join(profile_dir, config_file)
                    f = open(file_name, "w")
                    for line in lines:
                        f.write("%s\n" % line)
                    f.close()

            #Create $profile_dir/eclass (we fail to digest the ebuilds if it's not there)
            os.makedirs(os.path.join(repo_dir, "eclass"))

            if repo == "test_repo":
                #Create a minimal profile in /usr/portage
                sub_profile_dir = os.path.join(profile_dir, "default", "linux",
                                               "x86", "test_profile")
                os.makedirs(sub_profile_dir)

                eapi_file = os.path.join(sub_profile_dir, "eapi")
                f = open(eapi_file, "w")
                f.write("0\n")
                f.close()

                make_defaults_file = os.path.join(sub_profile_dir,
                                                  "make.defaults")
                f = open(make_defaults_file, "w")
                f.write("ARCH=\"x86\"\n")
                f.write("ACCEPT_KEYWORDS=\"x86\"\n")
                f.close()

                use_force_file = os.path.join(sub_profile_dir, "use.force")
                f = open(use_force_file, "w")
                f.write("x86\n")
                f.close()

                parent_file = os.path.join(sub_profile_dir, "parent")
                f = open(parent_file, "w")
                f.write("..\n")
                f.close()

                if profile:
                    for config_file, lines in profile.items():
                        if config_file not in self.config_files:
                            raise ValueError("Unknown config file: '%s'" %
                                             config_file)

                        file_name = os.path.join(sub_profile_dir, config_file)
                        f = open(file_name, "w")
                        for line in lines:
                            f.write("%s\n" % line)
                        f.close()

                #Create profile symlink
                os.symlink(sub_profile_dir,
                           os.path.join(user_config_dir, "make.profile"))

                #Create minimal herds.xml
                herds_xml = """<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE herds SYSTEM "http://www.gentoo.org/dtd/herds.dtd">
<?xml-stylesheet href="/xsl/herds.xsl" type="text/xsl" ?>
<?xml-stylesheet href="/xsl/guide.xsl" type="text/xsl" ?>
<herds>
<herd>
  <name>base-system</name>
  <email>[email protected]</email>
  <description>Core system utilities and libraries.</description>
  <maintainer>
    <email>[email protected]</email>
    <name>Base System</name>
    <role>Base System Maintainer</role>
  </maintainer>
</herd>
</herds>
"""
                with open(os.path.join(metadata_dir, "metadata.xml"),
                          'w') as f:
                    f.write(herds_xml)

        # Write empty entries for each repository, in order to exercise
        # RepoConfigLoader's repos.conf processing.
        repos_conf_file = os.path.join(user_config_dir, "repos.conf")
        f = open(repos_conf_file, "w")
        for repo in sorted(self.repo_dirs.keys()):
            f.write("[%s]\n" % repo)
            f.write("\n")
        f.close()

        for config_file, lines in user_config.items():
            if config_file not in self.config_files:
                raise ValueError("Unknown config file: '%s'" % config_file)

            file_name = os.path.join(user_config_dir, config_file)
            f = open(file_name, "w")
            for line in lines:
                f.write("%s\n" % line)
            f.close()

        #Create /usr/share/portage/config/make.globals
        make_globals_path = os.path.join(self.eroot,
                                         GLOBAL_CONFIG_PATH.lstrip(os.sep),
                                         "make.globals")
        ensure_dirs(os.path.dirname(make_globals_path))
        os.symlink(os.path.join(PORTAGE_BASE_PATH, "cnf", "make.globals"),
                   make_globals_path)

        #Create /usr/share/portage/config/sets/portage.conf
        default_sets_conf_dir = os.path.join(self.eroot,
                                             "usr/share/portage/config/sets")

        try:
            os.makedirs(default_sets_conf_dir)
        except os.error:
            pass

        provided_sets_portage_conf = \
         os.path.join(PORTAGE_BASE_PATH, "cnf/sets/portage.conf")
        os.symlink(provided_sets_portage_conf,
                   os.path.join(default_sets_conf_dir, "portage.conf"))

        set_config_dir = os.path.join(user_config_dir, "sets")

        try:
            os.makedirs(set_config_dir)
        except os.error:
            pass

        for sets_file, lines in sets.items():
            file_name = os.path.join(set_config_dir, sets_file)
            f = open(file_name, "w")
            for line in lines:
                f.write("%s\n" % line)
            f.close()

        user_config_dir = os.path.join(self.eroot, "etc", "portage")

        try:
            os.makedirs(user_config_dir)
        except os.error:
            pass

        for config_file, lines in user_config.items():
            if config_file not in self.config_files:
                raise ValueError("Unknown config file: '%s'" % config_file)

            file_name = os.path.join(user_config_dir, config_file)
            f = open(file_name, "w")
            for line in lines:
                f.write("%s\n" % line)
            f.close()
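
For reference, a self-contained sketch of the minimal test profile the method above writes out (eapi, make.defaults, use.force, parent); the base directory is hypothetical:

import os
import tempfile

profile_dir = os.path.join(
    tempfile.mkdtemp(), "profiles", "default", "linux", "x86", "test_profile"
)
os.makedirs(profile_dir)

profile_files = {
    "eapi": "0\n",
    "make.defaults": 'ARCH="x86"\nACCEPT_KEYWORDS="x86"\n',
    "use.force": "x86\n",
    "parent": "..\n",
}
for name, content in profile_files.items():
    with open(os.path.join(profile_dir, name), "w") as f:
        f.write(content)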