Esempio n. 1
0
def first_existing(path):
    """
    Return the first element of *path* that exists, walking upward
    toward the filesystem root.

    An element counts as existing when lstat succeeds, or when it
    fails with any error other than ENOENT or ESTALE (an error such
    as EACCES still proves that something occupies the path). This is
    useful to check whether there is permission to create a particular
    file or directory, without actually creating anything.

    @param path: a filesystem path
    @type path: str
    @rtype: str
    @return: the element that exists
    """
    for candidate in iter_parents(path):
        try:
            os.lstat(candidate)
        except OSError as e:
            # ENOENT/ESTALE mean "definitely not there"; any other
            # error implies the element exists.
            if e.errno in (errno.ENOENT, errno.ESTALE):
                continue
        return candidate

    # iter_parents yielded nothing that exists; the root always does.
    return os.sep
Esempio n. 2
0
	def _get_all_modules(self):
		"""scans the emaint modules dir for loadable modules

		@rtype: dictionary of module_plugins
		"""
		module_dir =  self._module_path
		importables = []
		names = os.listdir(module_dir)
		for entry in names:
			# skip any __init__ or __pycache__ files or directories
			if entry.startswith('__'):
				continue
			try:
				# test for statinfo to ensure it is a real module
				# directory (it must contain an __init__.py);
				# lstat raising disqualifies the entry
				os.lstat(os.path.join(module_dir, entry, '__init__.py'))
				importables.append(entry)
			except EnvironmentError:
				pass
		kids = {}
		for entry in importables:
			# Load each plugin and flatten its kid modules into one
			# dict keyed by the kid's name, recording the parent.
			new_module = Module(entry, self._namepath)
			for module_name in new_module.kids:
				kid = new_module.kids[module_name]
				kid['parent'] = new_module
				kids[kid['name']] = kid
			self.parents.append(entry)
		return kids
Esempio n. 3
0
    def _get_all_modules(self):
        """scans the _module_path dir for loadable modules

		@rtype: dictionary of module_plugins
		"""
        module_dir = self._module_path
        importables = []
        names = os.listdir(module_dir)
        for entry in names:
            # skip any __init__ or __pycache__ files or directories
            if entry.startswith('__'):
                continue
            try:
                # test for statinfo to ensure it should a real module
                # it will bail if it errors
                os.lstat(os.path.join(module_dir, entry, '__init__.py'))
                importables.append(entry)
            except EnvironmentError:
                pass
        kids = {}
        for entry in importables:
            new_module = Module(entry, self._namepath)
            self._check_compat(new_module)
            for module_name in new_module.kids:
                kid = new_module.kids[module_name]
                kid['parent'] = new_module
                kids[kid['name']] = kid
            self.parents.append(entry)
        return kids
Esempio n. 4
0
def first_existing(path):
	"""
	Returns the first existing path element, traversing from the given
	path to the root directory. A path is considered to exist if lstat
	either succeeds or raises an error other than ENOENT or ESTALE.

	This can be particularly useful to check if there is permission to
	create a particular file or directory, without actually creating
	anything.

	@param path: a filesystem path
	@type path: str
	@rtype: str
	@return: the element that exists
	"""
	existing = False
	for path in iter_parents(path):
		try:
			os.lstat(path)
			existing = True
		except OSError as e:
			# Any error other than "definitely not there" (ENOENT /
			# ESTALE), e.g. EACCES, still proves the element exists.
			if e.errno not in (errno.ENOENT, errno.ESTALE):
				existing = True

		if existing:
			return path

	# Fallback: the root directory always exists.
	return os.sep
Esempio n. 5
0
 def _a_real_module(entry):
     """Return True when *entry* names a real module directory.

     A candidate qualifies when lstat succeeds on its __init__.py;
     any EnvironmentError disqualifies it.

     NOTE(review): ``module_dir`` is a free variable supplied by the
     enclosing scope — confirm it is defined wherever this helper
     is used.
     """
     try:
         # test for statinfo to ensure it is a real module;
         # it will bail if it errors
         os.lstat(os.path.join(module_dir, entry, "__init__.py"))
     except EnvironmentError:
         return False
     return True
Esempio n. 6
0
def rcs_archive(archive, curconf, newconf, mrgconf):
	"""Archive existing config in rcs (on trunk). Then, if mrgconf is
	specified and an old branch version exists, merge the user's changes
	and the distributed changes and put the result into mrgconf.  Lastly,
	if newconf was specified, leave it in the archive dir with a .dist.new
	suffix along with the last 1.1.1 branch version with a .dist suffix."""

	# Best effort: the archive directory may already exist.
	try:
		os.makedirs(os.path.dirname(archive))
	except OSError:
		pass

	# lstat so a symlinked config is archived as a symlink, not its target.
	try:
		curconf_st = os.lstat(curconf)
	except OSError:
		curconf_st = None

	# Only regular files and symlinks are archived.
	if curconf_st is not None and \
		(stat.S_ISREG(curconf_st.st_mode) or
		stat.S_ISLNK(curconf_st.st_mode)):
		_archive_copy(curconf_st, curconf, archive)

	# If the RCS ,v file already exists, lock before checking in.
	if os.path.lexists(archive + ',v'):
		os.system(RCS_LOCK + ' ' + archive)
	os.system(RCS_PUT + ' ' + archive)

	ret = 0
	mystat = None
	if newconf:
		try:
			mystat = os.lstat(newconf)
		except OSError:
			pass

	if mystat is not None and \
		(stat.S_ISREG(mystat.st_mode) or
		stat.S_ISLNK(mystat.st_mode)):
		# Check out the vendor-branch version; if present, keep it
		# around as <archive>.dist before overwriting with newconf.
		os.system(RCS_GET + ' -r' + RCS_BRANCH + ' ' + archive)
		has_branch = os.path.lexists(archive)
		if has_branch:
			os.rename(archive, archive + '.dist')

		_archive_copy(mystat, newconf, archive)

		if has_branch:
			if mrgconf and os.path.isfile(archive) and \
				os.path.isfile(mrgconf):
				# This puts the results of the merge into mrgconf.
				ret = os.system(RCS_MERGE % (archive, mrgconf))
				os.chmod(mrgconf, mystat.st_mode)
				os.chown(mrgconf, mystat.st_uid, mystat.st_gid)
		os.rename(archive, archive + '.dist.new')

	return ret
Esempio n. 7
0
def file_archive(archive, curconf, newconf, mrgconf):
    """Archive existing config to the archive-dir, bumping old versions
	out of the way into .# versions (log-rotate style). Then, if mrgconf
	was specified and there is a .dist version, merge the user's changes
	and the distributed changes and put the result into mrgconf.  Lastly,
	if newconf was specified, archive it as a .dist.new version (which
	gets moved to the .dist version at the end of the processing).

	@return: exit status of the merge command, or 0 when no merge
		was attempted"""

    _file_archive_ensure_dir(os.path.dirname(archive))

    # Archive the current config file if it isn't already saved
    if (os.path.lexists(archive) and len(
            diffstatusoutput_mixed("diff -aq '%s' '%s'", curconf, archive)[1])
            != 0):
        _file_archive_rotate(archive)

    # lstat so a symlinked config is archived as a symlink.
    try:
        curconf_st = os.lstat(curconf)
    except OSError:
        curconf_st = None

    # Only regular files and symlinks are archived.
    if curconf_st is not None and \
     (stat.S_ISREG(curconf_st.st_mode) or
     stat.S_ISLNK(curconf_st.st_mode)):
        _archive_copy(curconf_st, curconf, archive)

    mystat = None
    if newconf:
        try:
            mystat = os.lstat(newconf)
        except OSError:
            pass

    # Bug fix: initialize the return value unconditionally so the
    # function returns 0 (instead of None) when newconf is absent or
    # not a regular file/symlink, matching rcs_archive's contract.
    ret = 0

    if mystat is not None and \
     (stat.S_ISREG(mystat.st_mode) or
     stat.S_ISLNK(mystat.st_mode)):
        # Save off new config file in the archive dir with .dist.new suffix
        newconf_archive = archive + '.dist.new'
        if os.path.isdir(
                newconf_archive) and not os.path.islink(newconf_archive):
            _file_archive_rotate(newconf_archive)
        _archive_copy(mystat, newconf, newconf_archive)

        if mrgconf and os.path.isfile(curconf) and \
         os.path.isfile(newconf) and \
         os.path.isfile(archive + '.dist'):
            # This puts the results of the merge into mrgconf.
            ret = os.system(DIFF3_MERGE %
                            (curconf, archive + '.dist', newconf, mrgconf))
            os.chmod(mrgconf, mystat.st_mode)
            os.chown(mrgconf, mystat.st_uid, mystat.st_gid)

    return ret
Esempio n. 8
0
def rcs_archive(archive, curconf, newconf, mrgconf):
	"""Archive existing config in rcs (on trunk). Then, if mrgconf is
	specified and an old branch version exists, merge the user's changes
	and the distributed changes and put the result into mrgconf.  Lastly,
	if newconf was specified, leave it in the archive dir with a .dist.new
	suffix along with the last 1.1.1 branch version with a .dist suffix."""

	# Best effort: the archive directory may already exist.
	try:
		os.makedirs(os.path.dirname(archive))
	except OSError:
		pass

	# lstat so a symlinked config is archived as a symlink, not its target.
	try:
		curconf_st = os.lstat(curconf)
	except OSError:
		curconf_st = None

	# Only regular files and symlinks are archived.
	if curconf_st is not None and \
		(stat.S_ISREG(curconf_st.st_mode) or
		stat.S_ISLNK(curconf_st.st_mode)):
		_archive_copy(curconf_st, curconf, archive)

	# If the RCS ,v file already exists, lock before checking in.
	if os.path.lexists(archive + ',v'):
		os.system(RCS_LOCK + ' ' + archive)
	os.system(RCS_PUT + ' ' + archive)

	ret = 0
	mystat = None
	if newconf:
		try:
			mystat = os.lstat(newconf)
		except OSError:
			pass

	if mystat is not None and \
		(stat.S_ISREG(mystat.st_mode) or
		stat.S_ISLNK(mystat.st_mode)):
		# Check out the vendor-branch version; if present, keep it
		# around as <archive>.dist before overwriting with newconf.
		os.system(RCS_GET + ' -r' + RCS_BRANCH + ' ' + archive)
		has_branch = os.path.lexists(archive)
		if has_branch:
			os.rename(archive, archive + '.dist')

		_archive_copy(mystat, newconf, archive)

		if has_branch:
			if mrgconf and os.path.isfile(archive) and \
				os.path.isfile(mrgconf):
				# This puts the results of the merge into mrgconf.
				ret = os.system(RCS_MERGE % (archive, mrgconf))
				os.chmod(mrgconf, mystat.st_mode)
				os.chown(mrgconf, mystat.st_uid, mystat.st_gid)
		os.rename(archive, archive + '.dist.new')

	return ret
Esempio n. 9
0
def file_archive(archive, curconf, newconf, mrgconf):
	"""Archive existing config to the archive-dir, bumping old versions
	out of the way into .# versions (log-rotate style). Then, if mrgconf
	was specified and there is a .dist version, merge the user's changes
	and the distributed changes and put the result into mrgconf.  Lastly,
	if newconf was specified, archive it as a .dist.new version (which
	gets moved to the .dist version at the end of the processing).

	@return: exit status of the merge command, or 0 when no merge
		was attempted"""

	_file_archive_ensure_dir(os.path.dirname(archive))

	# Archive the current config file if it isn't already saved
	if (os.path.lexists(archive) and
		len(diffstatusoutput_mixed(
		"diff -aq '%s' '%s'", curconf, archive)[1]) != 0):
		_file_archive_rotate(archive)

	# lstat so a symlinked config is archived as a symlink.
	try:
		curconf_st = os.lstat(curconf)
	except OSError:
		curconf_st = None

	# Only regular files and symlinks are archived.
	if curconf_st is not None and \
		(stat.S_ISREG(curconf_st.st_mode) or
		stat.S_ISLNK(curconf_st.st_mode)):
		_archive_copy(curconf_st, curconf, archive)

	mystat = None
	if newconf:
		try:
			mystat = os.lstat(newconf)
		except OSError:
			pass

	# Bug fix: initialize the return value unconditionally so the
	# function returns 0 (instead of None) when newconf is absent or
	# not a regular file/symlink, matching rcs_archive's contract.
	ret = 0

	if mystat is not None and \
		(stat.S_ISREG(mystat.st_mode) or
		stat.S_ISLNK(mystat.st_mode)):
		# Save off new config file in the archive dir with .dist.new suffix
		newconf_archive = archive + '.dist.new'
		if os.path.isdir(newconf_archive
			) and not os.path.islink(newconf_archive):
			_file_archive_rotate(newconf_archive)
		_archive_copy(mystat, newconf, newconf_archive)

		if mrgconf and os.path.isfile(curconf) and \
			os.path.isfile(newconf) and \
			os.path.isfile(archive + '.dist'):
			# This puts the results of the merge into mrgconf.
			ret = os.system(DIFF3_MERGE % (curconf, archive + '.dist', newconf, mrgconf))
			os.chmod(mrgconf, mystat.st_mode)
			os.chown(mrgconf, mystat.st_uid, mystat.st_gid)

	return ret
	def testCompileModules(self):
		"""Syntax-check every python file under the bin and pym trees
		by compiling it (no bytecode is written)."""
		for parent, dirs, files in itertools.chain(
			os.walk(PORTAGE_BIN_PATH),
			os.walk(PORTAGE_PYM_PATH)):
			parent = _unicode_decode(parent,
				encoding=_encodings['fs'], errors='strict')
			for x in files:
				x = _unicode_decode(x,
					encoding=_encodings['fs'], errors='strict')
				# Skip pre-compiled bytecode files.
				if x[-4:] in ('.pyc', '.pyo'):
					continue
				x = os.path.join(parent, x)
				st = os.lstat(x)
				# Only regular files (not symlinks/dirs) are checked.
				if not stat.S_ISREG(st.st_mode):
					continue
				do_compile = False
				if x[-3:] == '.py':
					do_compile = True
				else:
					# Check for python shebang
					with open(_unicode_encode(x,
						encoding=_encodings['fs'], errors='strict'), 'rb') as f:
						line = _unicode_decode(f.readline(),
							encoding=_encodings['content'], errors='replace')
					if line[:2] == '#!' and 'python' in line:
						do_compile = True
				if do_compile:
					# compile() raises SyntaxError on bad source,
					# failing the test.
					with open(_unicode_encode(x,
						encoding=_encodings['fs'], errors='strict'), 'rb') as f:
						compile(f.read(), x, 'exec')
	def testBashSyntax(self):
		"""Run ``bash -n`` on every bash script under PORTAGE_BIN_PATH
		and fail on any syntax error."""
		for parent, dirs, files in os.walk(PORTAGE_BIN_PATH):
			parent = _unicode_decode(parent,
				encoding=_encodings['fs'], errors='strict')
			for x in files:
				x = _unicode_decode(x,
					encoding=_encodings['fs'], errors='strict')
				# Bug fix: str.split('.')[-1] yields the extension
				# WITHOUT the leading dot, so the old comparison
				# against ('.py', '.pyc', '.pyo') never matched and
				# python files were not skipped.
				ext = x.split('.')[-1]
				if ext in ('py', 'pyc', 'pyo'):
					continue
				x = os.path.join(parent, x)
				st = os.lstat(x)
				# Only regular files are candidates.
				if not stat.S_ISREG(st.st_mode):
					continue

				# Check for bash shebang
				with open(_unicode_encode(x,
					encoding=_encodings['fs'], errors='strict'), 'rb') as f:
					line = _unicode_decode(f.readline(),
						encoding=_encodings['content'], errors='replace')
				if line[:2] == '#!' and \
					'bash' in line:
					# bash -n parses without executing.
					cmd = "%s -n %s" % (_shell_quote(BASH_BINARY), _shell_quote(x))
					status, output = subprocess_getstatusoutput(cmd)
					self.assertEqual(os.WIFEXITED(status) and \
						os.WEXITSTATUS(status) == os.EX_OK, True, msg=output)
Esempio n. 12
0
def _prepare_fake_distdir(settings, alist):
	"""Populate ${PORTAGE_BUILDDIR}/distdir with symlinks into the real
	DISTDIR, one per name in alist, and remove anything else found there.

	@param settings: mapping providing "DISTDIR" and "PORTAGE_BUILDDIR"
	@param alist: iterable of distfile names to expose as symlinks
	"""
	orig_distdir = settings["DISTDIR"]
	edpath = os.path.join(settings["PORTAGE_BUILDDIR"], "distdir")
	portage.util.ensure_dirs(edpath, gid=portage_gid, mode=0o755)

	# Remove any unexpected files or directories.
	for x in os.listdir(edpath):
		symlink_path = os.path.join(edpath, x)
		st = os.lstat(symlink_path)
		if x in alist and stat.S_ISLNK(st.st_mode):
			continue
		if stat.S_ISDIR(st.st_mode):
			shutil.rmtree(symlink_path)
		else:
			os.unlink(symlink_path)

	# Check for existing symlinks and recreate if necessary.
	for x in alist:
		symlink_path = os.path.join(edpath, x)
		target = os.path.join(orig_distdir, x)
		try:
			link_target = os.readlink(symlink_path)
		except OSError:
			# Missing or not a symlink: create it.
			os.symlink(target, symlink_path)
		else:
			# Replace symlinks that point at the wrong target.
			if link_target != target:
				os.unlink(symlink_path)
				os.symlink(target, symlink_path)
Esempio n. 13
0
    def _need_update(self, cpv, data):
        """Return True when the index entry *data* for *cpv* is stale.

        Stale means: missing MD5/SIZE/MTIME fields, unparsable values,
        or a mismatch against the package file's current lstat info.
        Returns False when the package file has disappeared
        (ENOENT/ESTALE), since no update is possible then.

        NOTE(review): this reads data.get("MTIME"), while the
        _pkgindex_entry writer elsewhere in this source stores the key
        "_mtime_" — confirm which key this index format actually uses.
        """

        if "MD5" not in data:
            return True

        size = data.get("SIZE")
        if size is None:
            return True

        mtime = data.get("MTIME")
        if mtime is None:
            return True

        pkg_path = self._bintree.getname(cpv)
        try:
            s = os.lstat(pkg_path)
        except OSError as e:
            if e.errno not in (errno.ENOENT, errno.ESTALE):
                raise
            # We can't update the index for this one because
            # it disappeared.
            return False

        try:
            # long() is a py2-compat alias; values come from the index
            # file as strings, hence the ValueError guard.
            if long(mtime) != s[stat.ST_MTIME]:
                return True
            if long(size) != long(s.st_size):
                return True
        except ValueError:
            return True

        return False
	def testCompileModules(self):
		"""Byte-compile every python file under the bin and pym trees,
		failing the test on any syntax error (py_compile doraise=True)."""
		for parent, dirs, files in itertools.chain(
			os.walk(PORTAGE_BIN_PATH),
			os.walk(PORTAGE_PYM_PATH)):
			parent = _unicode_decode(parent,
				encoding=_encodings['fs'], errors='strict')
			for x in files:
				x = _unicode_decode(x,
					encoding=_encodings['fs'], errors='strict')
				# Skip pre-compiled bytecode files.
				if x[-4:] in ('.pyc', '.pyo'):
					continue
				x = os.path.join(parent, x)
				st = os.lstat(x)
				# Only regular files are candidates.
				if not stat.S_ISREG(st.st_mode):
					continue
				do_compile = False
				cfile = x
				if x[-3:] == '.py':
					do_compile = True
				else:
					# Check for python shebang
					f = open(_unicode_encode(x,
						encoding=_encodings['fs'], errors='strict'), 'rb')
					line = _unicode_decode(f.readline(),
						encoding=_encodings['content'], errors='replace')
					f.close()
					if line[:2] == '#!' and \
						'python' in line:
						do_compile = True
						# Shebang scripts need a .py suffix for py_compile.
						cfile += '.py'
				if do_compile:
					# .pyc in debug mode, .pyo when optimized (-O).
					cfile += (__debug__ and 'c' or 'o')
					py_compile.compile(x, cfile=cfile, doraise=True)
Esempio n. 15
0
def _file_archive_ensure_dir(parent_dir):
    """
    Ensure that the parent directory for an archive exists.
    If a file exists where a directory is needed, then rename
    it (see bug 256376).

    @param parent_dir: path of parent directory
    @type parent_dir: str
    """

    for ancestor in iter_parents(parent_dir):
        # Use lstat because a symlink to a directory might point
        # to a directory outside of the config archive, making
        # it an unsuitable parent.
        try:
            ancestor_st = os.lstat(ancestor)
        except OSError:
            # Not there yet; keep walking toward the root.
            continue
        if not stat.S_ISDIR(ancestor_st.st_mode):
            # Something other than a directory is in the way;
            # rotate it out of the way.
            _file_archive_rotate(ancestor)
        break

    # Create any missing directories (already-present is fine).
    try:
        os.makedirs(parent_dir)
    except OSError:
        pass
Esempio n. 16
0
	def updateprotect(self):
		"""Update internal state for isprotected() calls.  Nonexistent paths
		are ignored."""

		# Use the merge-process os module.
		os = _os_merge

		self.protect = []
		self._dirs = set()
		for x in self.protect_list:
			ppath = normalize_path(
				os.path.join(self.myroot, x.lstrip(os.path.sep)))
			try:
				# stat (not lstat): a protect entry that is a symlink
				# to a directory is tracked as a directory here.
				if stat.S_ISDIR(os.stat(ppath).st_mode):
					self._dirs.add(ppath)
				self.protect.append(ppath)
			except OSError:
				# If it doesn't exist, there's no need to protect it.
				pass

		self.protectmask = []
		for x in self.mask_list:
			ppath = normalize_path(
				os.path.join(self.myroot, x.lstrip(os.path.sep)))
			try:
				"""Use lstat so that anything, even a broken symlink can be
				protected."""
				if stat.S_ISDIR(os.lstat(ppath).st_mode):
					self._dirs.add(ppath)
				self.protectmask.append(ppath)
				"""Now use stat in case this is a symlink to a directory."""
				if stat.S_ISDIR(os.stat(ppath).st_mode):
					self._dirs.add(ppath)
			except OSError:
				# If it doesn't exist, there's no need to mask it.
				pass
Esempio n. 17
0
	def _pkgindex_entry(self, cpv):
		"""
		Build the package-index entry for one binary package: run the
		configured checksums over the package file and record its size
		and mtime via lstat.
		Raises InvalidDependString if necessary.
		@rtype: dict
		@return: a dict containing entry for the give cpv.
		"""

		pkg_path = self.getname(cpv)

		entry = dict(cpv._metadata.items())
		entry.update(perform_multiple_checksums(
			pkg_path, hashes=self._pkgindex_hashes))

		entry["CPV"] = cpv
		pkg_st = os.lstat(pkg_path)
		entry["_mtime_"] = _unicode(pkg_st[stat.ST_MTIME])
		entry["SIZE"] = _unicode(pkg_st.st_size)

		# Record the package's location only when it differs from the
		# default <cpv>.tbz2 layout.
		rel_path = pkg_path[len(self.pkgdir) + 1:]
		if rel_path != cpv + ".tbz2":
			entry["PATH"] = rel_path

		return entry
Esempio n. 18
0
    def testBashSyntax(self):
        """Run ``bash -n`` on every bash script under PORTAGE_BIN_PATH
        and fail on any syntax error."""
        for parent, dirs, files in os.walk(PORTAGE_BIN_PATH):
            parent = _unicode_decode(parent,
                                     encoding=_encodings['fs'],
                                     errors='strict')
            for x in files:
                x = _unicode_decode(x,
                                    encoding=_encodings['fs'],
                                    errors='strict')
                # Bug fix: str.split('.')[-1] yields the extension
                # WITHOUT the leading dot, so the old comparison
                # against ('.py', '.pyc', '.pyo') never matched and
                # python files were not skipped.
                ext = x.split('.')[-1]
                if ext in ('py', 'pyc', 'pyo'):
                    continue
                x = os.path.join(parent, x)
                st = os.lstat(x)
                # Only regular files are candidates.
                if not stat.S_ISREG(st.st_mode):
                    continue

                # Check for bash shebang
                with open(
                        _unicode_encode(x,
                                        encoding=_encodings['fs'],
                                        errors='strict'), 'rb') as f:
                    line = _unicode_decode(f.readline(),
                                           encoding=_encodings['content'],
                                           errors='replace')
                if line[:2] == '#!' and \
                 'bash' in line:
                    # bash -n parses without executing.
                    cmd = "%s -n %s" % (_shell_quote(BASH_BINARY),
                                        _shell_quote(x))
                    status, output = subprocess_getstatusoutput(cmd)
                    self.assertEqual(os.WIFEXITED(status) and \
                     os.WEXITSTATUS(status) == os.EX_OK, True, msg=output)
Esempio n. 19
0
	def _pkgindex_entry(self, cpv):
		"""
		Performs checksums, and gets size and mtime via lstat.
		Raises InvalidDependString if necessary.
		@rtype: dict
		@return: a dict containing entry for the give cpv.
		"""

		pkg_path = self.getname(cpv)

		# Start from the package's metadata, then add checksums.
		d = dict(cpv._metadata.items())
		d.update(perform_multiple_checksums(
			pkg_path, hashes=self._pkgindex_hashes))

		d["CPV"] = cpv
		# lstat provides mtime and size for staleness detection.
		st = os.lstat(pkg_path)
		d["_mtime_"] = _unicode(st[stat.ST_MTIME])
		d["SIZE"] = _unicode(st.st_size)

		rel_path = pkg_path[len(self.pkgdir)+1:]
		# record location if it's non-default
		if rel_path != cpv + ".tbz2":
			d["PATH"] = rel_path

		return d
Esempio n. 20
0
	def _init_ipc_fifos(self):
		"""Ensure the build-dir IPC fifos exist with the expected
		ownership and permissions.

		Anything occupying either path that is not a fifo is removed
		and replaced.

		@return: (input_fifo, output_fifo) path tuple
		"""

		build_dir = self.settings['PORTAGE_BUILDDIR']
		input_fifo = os.path.join(build_dir, '.ipc_in')
		output_fifo = os.path.join(build_dir, '.ipc_out')

		for p in (input_fifo, output_fifo):

			try:
				st = os.lstat(p)
			except OSError:
				st = None

			if st is not None and not stat.S_ISFIFO(st.st_mode):
				# Something other than a fifo is in the way; best-effort
				# removal, then recreate below.
				try:
					os.unlink(p)
				except OSError:
					pass
				st = None

			if st is None:
				os.mkfifo(p)

			apply_secpass_permissions(p,
				uid=os.getuid(),
				gid=portage.data.portage_gid,
				mode=0o770, stat_cached=st)

		return (input_fifo, output_fifo)
Esempio n. 21
0
 def __iter__(self):
     """Yield location-relative paths of all cache entries.

     Walks at most one directory level deep so that a nested cache
     instance (e.g. an overlay inside /usr/portage/local, bug #302764)
     is not traversed. ".cpickle" files are skipped, and entries that
     vanish between listdir and lstat are silently ignored.
     """
     len_base = len(self.location)
     pending = [(0, self.location)]
     while pending:
         depth, dir_path = pending.pop()
         try:
             entries = os.listdir(dir_path)
         except OSError as e:
             if e.errno != errno.ENOENT:
                 raise
             del e
             continue
         for name in entries:
             if name.endswith(".cpickle"):
                 continue
             entry_path = os.path.join(dir_path, name)
             try:
                 entry_st = os.lstat(entry_path)
             except OSError:
                 # Cache entry disappeared.
                 continue
             if not stat.S_ISDIR(entry_st.st_mode):
                 yield entry_path[len_base + 1:]
                 continue
             # Only recurse 1 deep (see docstring).
             if depth < 1:
                 pending.append((depth + 1, entry_path))
Esempio n. 22
0
def _file_archive_ensure_dir(parent_dir):
	"""
	Ensure that the parent directory for an archive exists.
	If a file exists where a directory is needed, then rename
	it (see bug 256376).

	@param parent_dir: path of parent directory
	@type parent_dir: str
	"""

	for parent in iter_parents(parent_dir):
		# Use lstat because a symlink to a directory might point
		# to a directory outside of the config archive, making
		# it an unsuitable parent.
		try:
			parent_st = os.lstat(parent)
		except OSError:
			# Not there yet; keep walking toward the root.
			pass
		else:
			if not stat.S_ISDIR(parent_st.st_mode):
				# A non-directory is in the way; rotate it aside.
				_file_archive_rotate(parent)
			break

	# Create any missing directories (already-present is fine).
	try:
		os.makedirs(parent_dir)
	except OSError:
		pass
Esempio n. 23
0
	def _need_update(self, cpv, data):
		"""Return True when the index entry *data* for *cpv* is stale.

		Stale means: missing MD5/SIZE/_mtime_ fields, unparsable
		values, or a mismatch against the package file's current lstat
		info. Returns False when the package file has disappeared
		(ENOENT/ESTALE), since no update is possible then.
		"""

		if "MD5" not in data:
			return True

		size = data.get("SIZE")
		if size is None:
			return True

		mtime = data.get("_mtime_")
		if mtime is None:
			return True

		pkg_path = self._bintree.getname(cpv)
		try:
			pkg_st = os.lstat(pkg_path)
		except OSError as e:
			if e.errno not in (errno.ENOENT, errno.ESTALE):
				raise
			# We can't update the index for this one because
			# it disappeared.
			return False

		# Index values are strings, hence the ValueError guard;
		# long() is the file's py2-compat integer alias.
		try:
			return (long(mtime) != pkg_st[stat.ST_MTIME]
				or long(size) != long(pkg_st.st_size))
		except ValueError:
			return True
Esempio n. 24
0
    def _init_ipc_fifos(self):
        """Ensure the build-dir IPC fifos exist with the expected
        ownership and permissions, replacing any non-fifo occupant.

        @return: (input_fifo, output_fifo) path tuple
        """

        input_fifo = os.path.join(self.settings["PORTAGE_BUILDDIR"], ".ipc_in")
        output_fifo = os.path.join(self.settings["PORTAGE_BUILDDIR"],
                                   ".ipc_out")

        for p in (input_fifo, output_fifo):

            st = None
            try:
                st = os.lstat(p)
            except OSError:
                # Nothing there yet: create the fifo.
                os.mkfifo(p)
            else:
                if not stat.S_ISFIFO(st.st_mode):
                    # Something other than a fifo is in the way;
                    # best-effort removal, then recreate.
                    st = None
                    try:
                        os.unlink(p)
                    except OSError:
                        pass
                    os.mkfifo(p)

            # st is passed through so permissions are only changed
            # when they differ from the cached stat.
            apply_secpass_permissions(
                p,
                uid=os.getuid(),
                gid=portage.data.portage_gid,
                mode=0o770,
                stat_cached=st,
            )

        return (input_fifo, output_fifo)
Esempio n. 25
0
def cacheddir(my_original_path,
              ignorecvs,
              ignorelist,
              EmptyOnError,
              followSymlinks=True):
    """List a directory and classify each entry's file type.

    @param my_original_path: directory to list
    @param ignorecvs: when true, drop ".#*" files and VCS directories
    @param ignorelist: entry names to exclude from the result
    @param EmptyOnError: unused here; kept for API compatibility
    @param followSymlinks: use stat (follow links) instead of lstat
    @return: (names, types) where each type is 0=regular file,
        1=directory, 2=symlink, 3=other/unreadable
    @raises DirectoryNotFound: if the path is not a directory
    @raises PermissionDenied: if the path or listing is unreadable
    """
    mypath = normalize_path(my_original_path)
    try:
        pathstat = os.stat(mypath)
        if not stat.S_ISDIR(pathstat.st_mode):
            raise DirectoryNotFound(mypath)
    except EnvironmentError as e:
        if e.errno == PermissionDenied.errno:
            raise PermissionDenied(mypath)
        del e
        return [], []
    except PortageException:
        return [], []
    else:
        try:
            fpaths = os.listdir(mypath)
        except EnvironmentError as e:
            if e.errno != errno.EACCES:
                raise
            del e
            raise PermissionDenied(mypath)
        ftype = []
        for x in fpaths:
            try:
                if followSymlinks:
                    pathstat = os.stat(mypath + "/" + x)
                else:
                    pathstat = os.lstat(mypath + "/" + x)

                if stat.S_ISREG(pathstat[stat.ST_MODE]):
                    ftype.append(0)
                elif stat.S_ISDIR(pathstat[stat.ST_MODE]):
                    ftype.append(1)
                elif stat.S_ISLNK(pathstat[stat.ST_MODE]):
                    ftype.append(2)
                else:
                    ftype.append(3)
            except (IOError, OSError):
                ftype.append(3)

    if ignorelist or ignorecvs:
        ret_list = []
        ret_ftype = []
        for file_path, file_type in zip(fpaths, ftype):
            if file_path in ignorelist:
                pass
            elif ignorecvs:
                if file_path[:2] != ".#" and not (file_type == 1
                                                  and file_path in VCS_DIRS):
                    ret_list.append(file_path)
                    ret_ftype.append(file_type)
            else:
                # Bug fix: previously entries were silently dropped
                # here when ignorecvs was false but ignorelist was
                # non-empty; keep everything not in ignorelist.
                ret_list.append(file_path)
                ret_ftype.append(file_type)
    else:
        ret_list = fpaths
        ret_ftype = ftype

    return ret_list, ret_ftype
Esempio n. 26
0
	def updateprotect(self):
		"""Update internal state for isprotected() calls.  Nonexistent paths
		are ignored."""

		# Use the merge-process os module.
		os = _os_merge

		self.protect = []
		self._dirs = set()
		for x in self.protect_list:
			ppath = normalize_path(
				os.path.join(self.myroot, x.lstrip(os.path.sep)))
			try:
				# stat (not lstat): a protect entry that is a symlink
				# to a directory is tracked as a directory here.
				if stat.S_ISDIR(os.stat(ppath).st_mode):
					self._dirs.add(ppath)
				self.protect.append(ppath)
			except OSError:
				# If it doesn't exist, there's no need to protect it.
				pass

		self.protectmask = []
		for x in self.mask_list:
			ppath = normalize_path(
				os.path.join(self.myroot, x.lstrip(os.path.sep)))
			try:
				"""Use lstat so that anything, even a broken symlink can be
				protected."""
				if stat.S_ISDIR(os.lstat(ppath).st_mode):
					self._dirs.add(ppath)
				self.protectmask.append(ppath)
				"""Now use stat in case this is a symlink to a directory."""
				if stat.S_ISDIR(os.stat(ppath).st_mode):
					self._dirs.add(ppath)
			except OSError:
				# If it doesn't exist, there's no need to mask it.
				pass
Esempio n. 27
0
	def __iter__(self):
		"""generator for walking the dir struct"""
		dirs = [(0, self.location)]
		len_base = len(self.location)
		while dirs:
			depth, dir_path = dirs.pop()
			try:
				dir_list = os.listdir(dir_path)
			except OSError as e:
				if e.errno != errno.ENOENT:
					raise
				# Directory vanished between scheduling and listing.
				del e
				continue
			for l in dir_list:
				p = os.path.join(dir_path, l)
				try:
					st = os.lstat(p)
				except OSError:
					# Cache entry disappeared.
					continue
				if stat.S_ISDIR(st.st_mode):
					# Only recurse 1 deep, in order to avoid iteration over
					# entries from another nested cache instance. This can
					# happen if the user nests an overlay inside
					# /usr/portage/local as in bug #302764.
					if depth < 1:
						dirs.append((depth+1, p))
					continue

				# Yield the location-relative path; names that do not
				# parse as valid package strings are skipped.
				try:
					yield _pkg_str(p[len_base+1:])
				except InvalidData:
					continue
Esempio n. 28
0
def cacheddir(my_original_path, ignorecvs, ignorelist, EmptyOnError, followSymlinks=True):
	"""List a directory and classify each entry's file type.

	@param my_original_path: directory to list
	@param ignorecvs: when true, drop ".#*" files and VCS directories
	@param ignorelist: entry names to exclude from the result
	@param EmptyOnError: unused here; kept for API compatibility
	@param followSymlinks: use stat (follow links) instead of lstat
	@return: (names, types) where each type is 0=regular file,
		1=directory, 2=symlink, 3=other/unreadable
	@raises DirectoryNotFound: if the path is not a directory
	@raises PermissionDenied: if the path or listing is unreadable
	"""
	mypath = normalize_path(my_original_path)
	try:
		pathstat = os.stat(mypath)
		if not stat.S_ISDIR(pathstat.st_mode):
			raise DirectoryNotFound(mypath)
	except EnvironmentError as e:
		if e.errno == PermissionDenied.errno:
			raise PermissionDenied(mypath)
		del e
		return [], []
	except PortageException:
		return [], []
	else:
		try:
			fpaths = os.listdir(mypath)
		except EnvironmentError as e:
			if e.errno != errno.EACCES:
				raise
			del e
			raise PermissionDenied(mypath)
		ftype = []
		for x in fpaths:
			try:
				if followSymlinks:
					pathstat = os.stat(mypath+"/"+x)
				else:
					pathstat = os.lstat(mypath+"/"+x)

				if stat.S_ISREG(pathstat[stat.ST_MODE]):
					ftype.append(0)
				elif stat.S_ISDIR(pathstat[stat.ST_MODE]):
					ftype.append(1)
				elif stat.S_ISLNK(pathstat[stat.ST_MODE]):
					ftype.append(2)
				else:
					ftype.append(3)
			except (IOError, OSError):
				ftype.append(3)

	if ignorelist or ignorecvs:
		ret_list = []
		ret_ftype = []
		for file_path, file_type in zip(fpaths, ftype):
			if file_path in ignorelist:
				pass
			elif ignorecvs:
				if file_path[:2] != ".#" and \
					not (file_type == 1 and file_path in VCS_DIRS):
					ret_list.append(file_path)
					ret_ftype.append(file_type)
			else:
				# Bug fix: previously entries were silently dropped
				# here when ignorecvs was false but ignorelist was
				# non-empty; keep everything not in ignorelist.
				ret_list.append(file_path)
				ret_ftype.append(file_type)
	else:
		ret_list = fpaths
		ret_ftype = ftype

	return ret_list, ret_ftype
Esempio n. 29
0
	def aux_get(self, mycpv, wants, myrepo=None):
		"""Return the requested metadata values for a binary package.

		Values come from the in-memory aux cache when every wanted key
		is cached; otherwise they are read from the local tbz2's xpak
		data (for local packages) or from the remote-package dict.

		@param mycpv: package cpv (plain strings are accepted)
		@param wants: iterable of metadata key names
		@return: list of values, "" for missing keys, ordered as wants
		"""
		if self.bintree and not self.bintree.populated:
			self.bintree.populate()
		# Support plain string for backward compatibility with API
		# consumers (including portageq, which passes in a cpv from
		# a command-line argument).
		instance_key = self._instance_key(mycpv,
			support_string=True)
		# Fast path: every wanted (known) key is in the aux cache.
		if not self._known_keys.intersection(
			wants).difference(self._aux_cache_keys):
			aux_cache = self.cpvdict[instance_key]
			if aux_cache is not None:
				return [aux_cache.get(x, "") for x in wants]
		mysplit = mycpv.split("/")
		mylist = []
		tbz2name = mysplit[1]+".tbz2"
		if not self.bintree._remotepkgs or \
			not self.bintree.isremote(mycpv):
			# Local package: read metadata from the tbz2's xpak data;
			# _mtime_/SIZE come from lstat of the package file itself.
			try:
				tbz2_path = self.bintree._pkg_paths[instance_key]
			except KeyError:
				raise KeyError(mycpv)
			tbz2_path = os.path.join(self.bintree.pkgdir, tbz2_path)
			try:
				st = os.lstat(tbz2_path)
			except OSError:
				raise KeyError(mycpv)
			metadata_bytes = portage.xpak.tbz2(tbz2_path).get_data()
			def getitem(k):
				if k == "_mtime_":
					return _unicode(st[stat.ST_MTIME])
				elif k == "SIZE":
					return _unicode(st.st_size)
				v = metadata_bytes.get(_unicode_encode(k,
					encoding=_encodings['repo.content'],
					errors='backslashreplace'))
				if v is not None:
					v = _unicode_decode(v,
						encoding=_encodings['repo.content'], errors='replace')
				return v
		else:
			# Remote package: values come from the cached dict.
			getitem = self.cpvdict[instance_key].get
		mydata = {}
		mykeys = wants
		for x in mykeys:
			myval = getitem(x)
			# myval is None if the key doesn't exist
			# or the tbz2 is corrupt.
			if myval:
				# Normalize whitespace in the stored value.
				mydata[x] = " ".join(myval.split())

		# Ensure EAPI defaults to '0' when missing or empty.
		if not mydata.setdefault('EAPI', '0'):
			mydata['EAPI'] = '0'

		return [mydata.get(x, '') for x in wants]
Esempio n. 30
0
def find_updated_config_files(target_root, config_protect):
	"""
	Generator yielding protected configuration locations that have
	pending ._cfg????_* update files.

	Each yielded tuple is organized like this:
	( protected_dir, file_list )
	If the protected config isn't a protected_dir but a protected_file:
	( protected_file, None )
	Nothing is yielded when no configuration files need to be updated.

	@param target_root: root prefix joined onto each protected path
	@param config_protect: iterable of CONFIG_PROTECT path entries
	"""

	os = _os_merge

	if config_protect:
		# directories with some protect files in them
		for x in config_protect:
			files = []

			x = os.path.join(target_root, x.lstrip(os.path.sep))
			# Skip entries we could not update anyway.
			if not os.access(x, os.W_OK):
				continue
			try:
				mymode = os.lstat(x).st_mode
			except OSError:
				continue

			if stat.S_ISLNK(mymode):
				# We want to treat it like a directory if it
				# is a symlink to an existing directory.
				try:
					real_mode = os.stat(x).st_mode
					if stat.S_ISDIR(real_mode):
						mymode = real_mode
				except OSError:
					pass

			# Use find(1) to locate ._cfg????_* update candidates,
			# recursively for directories, else in the parent dir.
			if stat.S_ISDIR(mymode):
				mycommand = \
					"find '%s' -name '.*' -type d -prune -o -name '._cfg????_*'" % x
			else:
				mycommand = "find '%s' -maxdepth 1 -name '._cfg????_%s'" % \
						os.path.split(x.rstrip(os.path.sep))
			mycommand += " ! -name '.*~' ! -iname '.*.bak' -print0"
			a = subprocess_getstatusoutput(mycommand)

			if a[0] == 0:
				files = a[1].split('\0')
				# split always produces an empty string as the last element
				if files and not files[-1]:
					del files[-1]
				if files:
					if stat.S_ISDIR(mymode):
						yield (x, files)
					else:
						yield (x, None)
Esempio n. 31
0
def find_updated_config_files(target_root, config_protect):
	"""
	Generator yielding protected configuration locations with pending
	._cfg????_* updates: (dir_path, file_list) for protected
	directories, (file_path, None) for individually protected files.
	Yields nothing when no updates are pending.
	"""

	os = _os_merge

	if not config_protect:
		return

	for element in config_protect:
		element = os.path.join(target_root, element.lstrip(os.path.sep))
		# Skip anything we could not write to anyway.
		if not os.access(element, os.W_OK):
			continue
		try:
			element_mode = os.lstat(element).st_mode
		except OSError:
			continue

		if stat.S_ISLNK(element_mode):
			# A symlink to an existing directory is treated as
			# a directory.
			try:
				target_mode = os.stat(element).st_mode
			except OSError:
				pass
			else:
				if stat.S_ISDIR(target_mode):
					element_mode = target_mode

		is_dir = stat.S_ISDIR(element_mode)
		if is_dir:
			cmd = \
				"find '%s' -name '.*' -type d -prune -o -name '._cfg????_*'" % element
		else:
			cmd = "find '%s' -maxdepth 1 -name '._cfg????_%s'" % \
					os.path.split(element.rstrip(os.path.sep))
		cmd += " ! -name '.*~' ! -iname '.*.bak' -print0"
		status_output = subprocess_getstatusoutput(cmd)

		if status_output[0] != 0:
			continue
		updates = status_output[1].split('\0')
		# split always produces a trailing empty string; drop it.
		if updates and not updates[-1]:
			updates.pop()
		if updates:
			yield (element, updates if is_dir else None)
Esempio n. 32
0
	def prevent_collision(self, cpv):
		"""Make sure that the file location ${PKGDIR}/All/${PF}.tbz2 is safe to
		use for a given cpv.  If a collision will occur with an existing
		package from another category, the existing package will be bumped to
		${PKGDIR}/${CATEGORY}/${PF}.tbz2 so that both can coexist."""
		# Nothing to do unless this layout uses the shared All/ directory.
		if not self._all_directory:
			return

		# Copy group permissions for new directories that
		# may have been created.
		for path in ("All", catsplit(cpv)[0]):
			path = os.path.join(self.pkgdir, path)
			self._ensure_dir(path)
			if not os.access(path, os.W_OK):
				raise PermissionDenied("access('%s', W_OK)" % path)

		# If this cpv's file already lives under All/, there is no
		# possible collision.
		full_path = self.getname(cpv)
		if "All" == full_path.split(os.path.sep)[-2]:
			return
		"""Move a colliding package if it exists.  Code below this point only
		executes in rare cases."""
		mycat, mypkg = catsplit(cpv)
		myfile = mypkg + ".tbz2"
		mypath = os.path.join("All", myfile)
		dest_path = os.path.join(self.pkgdir, mypath)

		try:
			st = os.lstat(dest_path)
		except OSError:
			st = None
		else:
			if stat.S_ISLNK(st.st_mode):
				# A symlink is not a real collision; remove it so the
				# name can be reused (ignore races where it vanished).
				st = None
				try:
					os.unlink(dest_path)
				except OSError:
					if os.path.exists(dest_path):
						raise

		if st is not None:
			# For invalid packages, other_cat could be None.
			other_cat = portage.xpak.tbz2(dest_path).getfile(
				_unicode_encode("CATEGORY",
				encoding=_encodings['repo.content']))
			if other_cat:
				other_cat = _unicode_decode(other_cat,
					encoding=_encodings['repo.content'], errors='replace')
				other_cat = other_cat.strip()
				other_cpv = other_cat + "/" + mypkg
				# Bump the colliding package into its own category dir.
				self._move_from_all(other_cpv)
				self.inject(other_cpv)
		self._move_to_all(cpv)
Esempio n. 33
0
	def prevent_collision(self, cpv):
		"""Make sure that the file location ${PKGDIR}/All/${PF}.tbz2 is safe to
		use for a given cpv.  If a collision will occur with an existing
		package from another category, the existing package will be bumped to
		${PKGDIR}/${CATEGORY}/${PF}.tbz2 so that both can coexist."""
		if not self._all_directory:
			return

		# Propagate group permissions to any directories created here.
		for subdir in ("All", catsplit(cpv)[0]):
			subdir = os.path.join(self.pkgdir, subdir)
			self._ensure_dir(subdir)
			if not os.access(subdir, os.W_OK):
				raise PermissionDenied("access('%s', W_OK)" % subdir)

		if self.getname(cpv).split(os.path.sep)[-2] == "All":
			return

		# Rarely reached: relocate a colliding package if one exists.
		mycat, mypkg = catsplit(cpv)
		collision_path = os.path.join(self.pkgdir,
			os.path.join("All", mypkg + ".tbz2"))

		try:
			collision_st = os.lstat(collision_path)
		except OSError:
			collision_st = None
		else:
			if stat.S_ISLNK(collision_st.st_mode):
				# A symlink is not a real collision; drop it so the
				# name is free (tolerate it vanishing concurrently).
				collision_st = None
				try:
					os.unlink(collision_path)
				except OSError:
					if os.path.exists(collision_path):
						raise

		if collision_st is not None:
			# For invalid packages, other_cat could be None.
			other_cat = portage.xpak.tbz2(collision_path).getfile(
				_unicode_encode("CATEGORY",
				encoding=_encodings['repo.content']))
			if other_cat:
				other_cat = _unicode_decode(other_cat,
					encoding=_encodings['repo.content'],
					errors='replace').strip()
				other_cpv = other_cat + "/" + mypkg
				self._move_from_all(other_cpv)
				self.inject(other_cpv)
		self._move_to_all(cpv)
Esempio n. 34
0
 def modify_files(dir_path):
     """Touch every direct child of dir_path: append a timestamped line
     to regular files; recreate symlinks with a timestamp appended to
     their target string."""
     for entry in os.listdir(dir_path):
         full = os.path.join(dir_path, entry)
         mode = os.lstat(full).st_mode
         if stat.S_ISREG(mode):
             with io.open(full, mode='a',
                          encoding=_encodings["stdio"]) as handle:
                 handle.write("modified at %d\n" % time.time())
         elif stat.S_ISLNK(mode):
             # Symlinks cannot be appended to; replace the link with
             # one whose target string carries the timestamp.
             target = os.readlink(full)
             os.unlink(full)
             os.symlink(target + " modified at %d" % time.time(), full)
Esempio n. 35
0
		def modify_files(dir_path):
			# Touch each direct child of dir_path: append a timestamped
			# line to regular files; for symlinks, recreate the link with
			# a timestamp appended to its target string.
			for name in os.listdir(dir_path):
				path = os.path.join(dir_path, name)
				st = os.lstat(path)
				if stat.S_ISREG(st.st_mode):
					with io.open(path, mode='a',
						encoding=_encodings["stdio"]) as f:
						f.write("modified at %d\n" % time.time())
				elif stat.S_ISLNK(st.st_mode):
					# Symlinks can't be appended to; replace them.
					old_dest = os.readlink(path)
					os.unlink(path)
					os.symlink(old_dest +
						" modified at %d" % time.time(), path)
Esempio n. 36
0
    def testBashSyntax(self):
        """Run ``bash -n`` on every bash script under the portage bin
        (and misc) trees and assert that each parses cleanly."""
        locations = [PORTAGE_BIN_PATH]
        misc_dir = os.path.join(PORTAGE_BASE_PATH, "misc")
        if os.path.isdir(misc_dir):
            locations.append(misc_dir)
        for parent, dirs, files in chain.from_iterable(
                os.walk(x) for x in locations):
            parent = _unicode_decode(parent,
                                     encoding=_encodings["fs"],
                                     errors="strict")
            for x in files:
                x = _unicode_decode(x,
                                    encoding=_encodings["fs"],
                                    errors="strict")
                # split(".")[-1] yields the extension WITHOUT a leading
                # dot, so compare bare names; the previous ".py" forms
                # never matched and python files were needlessly scanned.
                ext = x.split(".")[-1]
                if ext in ("py", "pyc", "pyo"):
                    continue
                x = os.path.join(parent, x)
                st = os.lstat(x)
                if not stat.S_ISREG(st.st_mode):
                    continue

                # Check for bash shebang; the context manager closes the
                # file even if decoding raises.
                with open(
                        _unicode_encode(x,
                                        encoding=_encodings["fs"],
                                        errors="strict"), "rb") as f:
                    line = _unicode_decode(f.readline(),
                                           encoding=_encodings["content"],
                                           errors="replace")
                if line[:2] == "#!" and "bash" in line:
                    cmd = [BASH_BINARY, "-n", x]
                    # Use a distinct loop variable; the old code shadowed x.
                    cmd = [
                        _unicode_encode(arg,
                                        encoding=_encodings["fs"],
                                        errors="strict") for arg in cmd
                    ]
                    proc = subprocess.Popen(cmd,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.STDOUT)
                    output = _unicode_decode(proc.communicate()[0],
                                             encoding=_encodings["fs"])
                    status = proc.wait()
                    self.assertEqual(
                        os.WIFEXITED(status)
                        and os.WEXITSTATUS(status) == os.EX_OK,
                        True,
                        msg=output,
                    )
	def testCompileModules(self):
		"""Walk the portage python packages and bin scripts, and verify
		that every python file (by extension or shebang) compiles."""
		iters = [os.walk(os.path.join(PORTAGE_PYM_PATH, x))
			for x in PORTAGE_PYM_PACKAGES]
		iters.append(os.walk(PORTAGE_BIN_PATH))

		for parent, _dirs, files in itertools.chain(*iters):
			parent = _unicode_decode(parent,
				encoding=_encodings['fs'], errors='strict')
			for x in files:
				x = _unicode_decode(x,
					encoding=_encodings['fs'], errors='strict')
				# Skip byte-compiled artifacts.
				if x[-4:] in ('.pyc', '.pyo'):
					continue
				x = os.path.join(parent, x)
				st = os.lstat(x)
				if not stat.S_ISREG(st.st_mode):
					continue

				bin_path = os.path.relpath(x, PORTAGE_BIN_PATH)
				mod_path = os.path.relpath(x, PORTAGE_PYM_PATH)

				# Honor a per-module/script minimum python version,
				# if declared (presumably in module/script metadata
				# maps defined elsewhere in this file).
				meta = module_metadata.get(mod_path) or script_metadata.get(bin_path)
				if meta:
					req_py = tuple(int(x) for x
							in meta.get('required_python', '0.0').split('.'))
					if sys.version_info < req_py:
						continue

				do_compile = False
				if x[-3:] == '.py':
					do_compile = True
				else:
					# Check for python shebang.
					try:
						with open(_unicode_encode(x,
							encoding=_encodings['fs'], errors='strict'), 'rb') as f:
							line = _unicode_decode(f.readline(),
								encoding=_encodings['content'], errors='replace')
					except IOError as e:
						# Some tests create files that are unreadable by the
						# user (by design), so ignore EACCES issues.
						if e.errno != errno.EACCES:
							raise
						continue
					if line[:2] == '#!' and 'python' in line:
						do_compile = True
				if do_compile:
					# compile() raises SyntaxError on failure, which
					# fails the test with a useful traceback.
					with open(_unicode_encode(x,
						encoding=_encodings['fs'], errors='strict'), 'rb') as f:
						compile(f.read(), x, 'exec')
Esempio n. 38
0
    def testBashSyntax(self):
        """Run ``bash -n`` on every bash script under the portage bin
        (and misc) trees and assert that each parses cleanly."""
        locations = [PORTAGE_BIN_PATH]
        misc_dir = os.path.join(PORTAGE_BASE_PATH, "misc")
        if os.path.isdir(misc_dir):
            locations.append(misc_dir)
        for parent, dirs, files in \
         chain.from_iterable(os.walk(x) for x in locations):
            parent = _unicode_decode(parent,
                                     encoding=_encodings['fs'],
                                     errors='strict')
            for x in files:
                x = _unicode_decode(x,
                                    encoding=_encodings['fs'],
                                    errors='strict')
                # split('.')[-1] yields the extension WITHOUT a leading
                # dot, so compare bare names; the previous '.py' forms
                # never matched and python files were needlessly scanned.
                ext = x.split('.')[-1]
                if ext in ('py', 'pyc', 'pyo'):
                    continue
                x = os.path.join(parent, x)
                st = os.lstat(x)
                if not stat.S_ISREG(st.st_mode):
                    continue

                # Check for bash shebang; the context manager closes
                # the file even if decoding raises.
                with open(
                        _unicode_encode(x,
                                        encoding=_encodings['fs'],
                                        errors='strict'), 'rb') as f:
                    line = _unicode_decode(f.readline(),
                                           encoding=_encodings['content'],
                                           errors='replace')
                if line[:2] == '#!' and \
                 'bash' in line:
                    cmd = [BASH_BINARY, "-n", x]
                    if sys.hexversion < 0x3000000 or sys.hexversion >= 0x3020000:
                        # Python 3.1 does not support bytes in Popen args.
                        # Use a distinct loop variable (old code shadowed x).
                        cmd = [
                            _unicode_encode(arg,
                                            encoding=_encodings['fs'],
                                            errors='strict') for arg in cmd
                        ]
                    proc = subprocess.Popen(cmd,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.STDOUT)
                    output = _unicode_decode(proc.communicate()[0],
                                             encoding=_encodings['fs'])
                    status = proc.wait()
                    self.assertEqual(os.WIFEXITED(status) and \
                     os.WEXITSTATUS(status) == os.EX_OK, True, msg=output)
Esempio n. 39
0
	def __iter__(self):
		"""Yield location-relative paths of all non-.cpickle entries,
		walking the directory tree breadth-first."""
		pending = [self.location]
		prefix_len = len(self.location)
		while pending:
			current = pending[0]
			for entry in os.listdir(current):
				# .cpickle files are internal cache data; skip them.
				if entry.endswith(".cpickle"):
					continue
				full = os.path.join(current, entry)
				if stat.S_ISDIR(os.lstat(full).st_mode):
					pending.append(full)
				else:
					# Strip the base path plus its trailing separator.
					yield full[prefix_len + 1:]
			pending.pop(0)
Esempio n. 40
0
 def __iter__(self):
     """Generator for walking the dir struct: yields location-relative
     paths of all non-.cpickle entries, breadth-first."""
     # Queue of directories still to scan (breadth-first order).
     dirs = [self.location]
     len_base = len(self.location)
     while len(dirs):
         for l in os.listdir(dirs[0]):
             # .cpickle files are internal cache data; skip them.
             if l.endswith(".cpickle"):
                 continue
             p = os.path.join(dirs[0], l)
             st = os.lstat(p)
             if stat.S_ISDIR(st.st_mode):
                 dirs.append(p)
                 continue
             # Yield relative to self.location (the +1 strips the
             # path separator).
             yield p[len_base + 1:]
         dirs.pop(0)
Esempio n. 41
0
def diff_mixed(func, file1, file2):
    """
    Invoke func(path1, path2) to diff file1 against file2, substituting
    a temporary placeholder file for any operand that is not a regular
    file (missing, symlink, directory, fifo, or device) so that func
    always receives two readable regular-file paths.

    @param func: callable taking two file paths and returning the diff
    @param file1: path of the old file
    @param file2: path of the new file
    @return: the return value of func
    """
    tempdir = None
    try:
        if os.path.islink(file1) and \
         not os.path.islink(file2) and \
         os.path.isfile(file1) and \
         os.path.isfile(file2):
            # If a regular file replaces a symlink to a regular
            # file, then show the diff between the regular files
            # (bug #330221). Diff file1 against file2 (opening file1
            # follows the symlink); the previous (file2, file2) pair
            # diffed file2 with itself and always produced no diff.
            diff_files = (file1, file2)
        else:
            files = [file1, file2]
            diff_files = [file1, file2]
            for i in range(len(diff_files)):
                try:
                    st = os.lstat(diff_files[i])
                except OSError:
                    st = None
                if st is not None and stat.S_ISREG(st.st_mode):
                    continue

                if tempdir is None:
                    tempdir = tempfile.mkdtemp()
                diff_files[i] = os.path.join(tempdir, "%d" % i)
                # Label each placeholder with the path it stands for
                # (files[i]); the old code used file1 for both
                # operands, mislabeling the second one.
                if st is None:
                    content = "/dev/null\n"
                elif stat.S_ISLNK(st.st_mode):
                    link_dest = os.readlink(files[i])
                    content = "SYM: %s -> %s\n" % \
                     (files[i], link_dest)
                elif stat.S_ISDIR(st.st_mode):
                    content = "DIR: %s\n" % (files[i], )
                elif stat.S_ISFIFO(st.st_mode):
                    content = "FIF: %s\n" % (files[i], )
                else:
                    content = "DEV: %s\n" % (files[i], )
                with io.open(diff_files[i],
                             mode='w',
                             encoding=_encodings['stdio']) as f:
                    f.write(content)

        return func(diff_files[0], diff_files[1])

    finally:
        if tempdir is not None:
            shutil.rmtree(tempdir)
Esempio n. 42
0
	def _move_to_all(self, cpv):
		"""If the file exists, move it.  Whether or not it exists, update state
		for future getname() calls."""
		mycat, mypkg = catsplit(cpv)
		tbz2_name = mypkg + ".tbz2"
		# Record the All/ location unconditionally for getname().
		self._pkg_paths[cpv] = os.path.join("All", tbz2_name)
		old_path = os.path.join(self.pkgdir, mycat, tbz2_name)
		try:
			old_st = os.lstat(old_path)
		except OSError:
			return
		if not stat.S_ISREG(old_st.st_mode):
			return
		# Only a regular file is actually moved.
		self._ensure_dir(os.path.join(self.pkgdir, "All"))
		_movefile(old_path, os.path.join(self.pkgdir, "All", tbz2_name),
			mysettings=self.settings)
		self._create_symlink(cpv)
		self.inject(cpv)
Esempio n. 43
0
	def _move_to_all(self, cpv):
		"""If the file exists, move it.  Whether or not it exists, update state
		for future getname() calls."""
		mycat, mypkg = catsplit(cpv)
		myfile = mypkg + ".tbz2"
		# Record the All/ location unconditionally for getname().
		self._pkg_paths[cpv] = os.path.join("All", myfile)
		src_path = os.path.join(self.pkgdir, mycat, myfile)
		try:
			mystat = os.lstat(src_path)
		except OSError as e:
			mystat = None
		# Only move if the source exists and is a regular file.
		if mystat and stat.S_ISREG(mystat.st_mode):
			self._ensure_dir(os.path.join(self.pkgdir, "All"))
			dest_path = os.path.join(self.pkgdir, "All", myfile)
			_movefile(src_path, dest_path, mysettings=self.settings)
			self._create_symlink(cpv)
			self.inject(cpv)
Esempio n. 44
0
def diff_mixed(func, file1, file2):
	"""
	Invoke func(path1, path2) to diff file1 against file2, substituting
	a temporary placeholder file for any operand that is not a regular
	file (missing, symlink, directory, fifo, or device) so that func
	always receives two readable regular-file paths.

	@param func: callable taking two file paths and returning the diff
	@param file1: path of the old file
	@param file2: path of the new file
	@return: the return value of func
	"""
	tempdir = None
	try:
		if os.path.islink(file1) and \
			not os.path.islink(file2) and \
			os.path.isfile(file1) and \
			os.path.isfile(file2):
			# If a regular file replaces a symlink to a regular
			# file, then show the diff between the regular files
			# (bug #330221). Diff file1 against file2 (opening file1
			# follows the symlink); the previous (file2, file2) pair
			# diffed file2 with itself and always produced no diff.
			diff_files = (file1, file2)
		else:
			files = [file1, file2]
			diff_files = [file1, file2]
			for i in range(len(diff_files)):
				try:
					st = os.lstat(diff_files[i])
				except OSError:
					st = None
				if st is not None and stat.S_ISREG(st.st_mode):
					continue

				if tempdir is None:
					tempdir = tempfile.mkdtemp()
				diff_files[i] = os.path.join(tempdir, "%d" % i)
				# Label each placeholder with the path it stands for
				# (files[i]); the old code used file1 for both
				# operands, mislabeling the second one.
				if st is None:
					content = "/dev/null\n"
				elif stat.S_ISLNK(st.st_mode):
					link_dest = os.readlink(files[i])
					content = "SYM: %s -> %s\n" % \
						(files[i], link_dest)
				elif stat.S_ISDIR(st.st_mode):
					content = "DIR: %s\n" % (files[i],)
				elif stat.S_ISFIFO(st.st_mode):
					content = "FIF: %s\n" % (files[i],)
				else:
					content = "DEV: %s\n" % (files[i],)
				with io.open(diff_files[i], mode='w',
					encoding=_encodings['stdio']) as f:
					f.write(content)

		return func(diff_files[0], diff_files[1])

	finally:
		if tempdir is not None:
			shutil.rmtree(tempdir)
Esempio n. 45
0
def apply_secpass_permissions(filename, uid=-1, gid=-1, mode=-1, mask=-1,
	stat_cached=None, follow_links=True):
	"""A wrapper around apply_permissions that uses secpass and simple
	logic to apply as much of the permissions as possible without
	generating an obviously avoidable permission exception. Despite
	attempts to avoid an exception, it's possible that one will be raised
	anyway, so be prepared.
	Returns True if all permissions are applied and False if some are left
	unapplied."""

	if stat_cached is None:
		try:
			if follow_links:
				stat_cached = os.stat(filename)
			else:
				stat_cached = os.lstat(filename)
		except OSError as oe:
			# Translate common errno values into portage exceptions.
			func_call = "stat('%s')" % filename
			if oe.errno == errno.EPERM:
				raise OperationNotPermitted(func_call)
			elif oe.errno == errno.EACCES:
				raise PermissionDenied(func_call)
			elif oe.errno == errno.ENOENT:
				raise FileNotFound(filename)
			else:
				raise

	all_applied = True

	# Without elevated privileges, drop ownership changes that the
	# current user is not able to perform.
	if portage.data.secpass < 2:

		# chown to a different uid requires root.
		if uid != -1 and \
		uid != stat_cached.st_uid:
			all_applied = False
			uid = -1

		# chgrp is only possible to one of the caller's own groups.
		if gid != -1 and \
		gid != stat_cached.st_gid and \
		gid not in os.getgroups():
			all_applied = False
			gid = -1

	apply_permissions(filename, uid=uid, gid=gid, mode=mode, mask=mask,
		stat_cached=stat_cached, follow_links=follow_links)
	return all_applied
Esempio n. 46
0
def apply_secpass_permissions(filename, uid=-1, gid=-1, mode=-1, mask=-1,
	stat_cached=None, follow_links=True):
	"""A wrapper around apply_permissions that uses secpass and simple
	logic to apply as much of the permissions as possible without
	generating an obviously avoidable permission exception. Despite
	attempts to avoid an exception, it's possible that one will be raised
	anyway, so be prepared.
	Returns True if all permissions are applied and False if some are left
	unapplied."""

	if stat_cached is None:
		stat_func = os.stat if follow_links else os.lstat
		try:
			stat_cached = stat_func(filename)
		except OSError as oe:
			# Translate common errno values into portage exceptions.
			func_call = "stat('%s')" % filename
			if oe.errno == errno.EPERM:
				raise OperationNotPermitted(func_call)
			if oe.errno == errno.EACCES:
				raise PermissionDenied(func_call)
			if oe.errno == errno.ENOENT:
				raise FileNotFound(filename)
			raise

	all_applied = True

	if portage.data.secpass < 2:
		# Without elevated privileges, drop ownership changes the
		# current user cannot perform.

		if uid != -1 and uid != stat_cached.st_uid:
			# chown to a different uid requires root.
			all_applied = False
			uid = -1

		if gid != -1 and gid != stat_cached.st_gid \
			and gid not in os.getgroups():
			# chgrp only works into one of the caller's own groups.
			all_applied = False
			gid = -1

	apply_permissions(filename, uid=uid, gid=gid, mode=mode, mask=mask,
		stat_cached=stat_cached, follow_links=follow_links)
	return all_applied
Esempio n. 47
0
def _file_archive_rotate(archive):
    """
    Rename archive to archive + '.1', and perform similar rotation
    for files up to archive + '.9'.

    @param archive: file path to archive
    @type archive: str
    """

    max_suf = 0
    try:
        for max_suf, max_st, max_path in (
            (suf, os.lstat(path), path)
                for suf, path in ((suf, "%s.%s" % (archive, suf))
                                  for suf in range(1, _ARCHIVE_ROTATE_MAX +
                                                   1))):
            pass
    except OSError as e:
        if e.errno not in (errno.ENOENT, errno.ESTALE):
            raise
        # There's already an unused suffix.
    else:
        # Free the max suffix in order to avoid possible problems
        # when we rename another file or directory to the same
        # location (see bug 256376).
        if stat.S_ISDIR(max_st.st_mode):
            # Removing a directory might destroy something important,
            # so rename it instead.
            head, tail = os.path.split(archive)
            placeholder = tempfile.NamedTemporaryFile(prefix="%s." % tail,
                                                      dir=head)
            placeholder.close()
            os.rename(max_path, placeholder.name)
        else:
            os.unlink(max_path)

        # The max suffix is now unused.
        max_suf -= 1

    for suf in range(max_suf + 1, 1, -1):
        os.rename("%s.%s" % (archive, suf - 1), "%s.%s" % (archive, suf))

    os.rename(archive, "%s.1" % (archive, ))
Esempio n. 48
0
def _file_archive_rotate(archive):
	"""
	Rename archive to archive + '.1', and perform similar rotation
	for files up to archive + '.9'.

	@param archive: file path to archive
	@type archive: str
	"""

	max_suf = 0
	try:
		# Scan archive.1, archive.2, ... lstat'ing each; the loop stops
		# at the first missing suffix (OSError), leaving max_suf bound
		# to the highest suffix that exists.
		for max_suf, max_st, max_path in (
			(suf, os.lstat(path), path) for suf, path in (
			(suf, "%s.%s" % (archive, suf)) for suf in range(
			1, _ARCHIVE_ROTATE_MAX + 1))):
			pass
	except OSError as e:
		if e.errno not in (errno.ENOENT, errno.ESTALE):
			raise
		# There's already an unused suffix.
	else:
		# Free the max suffix in order to avoid possible problems
		# when we rename another file or directory to the same
		# location (see bug 256376).
		if stat.S_ISDIR(max_st.st_mode):
			# Removing a directory might destroy something important,
			# so rename it instead.
			head, tail = os.path.split(archive)
			placeholder = tempfile.NamedTemporaryFile(
				prefix="%s." % tail,
				dir=head)
			placeholder.close()
			os.rename(max_path, placeholder.name)
		else:
			os.unlink(max_path)

		# The max suffix is now unused.
		max_suf -= 1

	# Shift each existing suffix up by one, highest first.
	for suf in range(max_suf + 1, 1, -1):
		os.rename("%s.%s" % (archive, suf - 1), "%s.%s" % (archive, suf))

	os.rename(archive, "%s.1" % (archive,))
Esempio n. 49
0
	def pruneNonExisting(self):
		""" Remove all records for objects that no longer exist on the filesystem. """

		os = _os_merge

		for cps in list(self._data):
			cpv, counter, _paths = self._data[cps]

			# Classify each recorded path: regular files survive
			# directly; symlinks are held aside for the check below.
			paths = []
			hardlinks = set()
			symlinks = {}
			for f in _paths:
				f_abs = os.path.join(self._root, f.lstrip(os.sep))
				try:
					lst = os.lstat(f_abs)
				except OSError:
					# Path no longer exists; drop it.
					continue
				if stat.S_ISLNK(lst.st_mode):
					try:
						symlinks[f] = os.readlink(f_abs)
					except OSError:
						continue
				elif stat.S_ISREG(lst.st_mode):
					hardlinks.add(f)
					paths.append(f)

			# Only count symlinks as preserved if they still point to a hardink
			# in the same directory, in order to handle cases where a tool such
			# as eselect-opengl has updated the symlink to point to a hardlink
			# in a different directory (see bug #406837). The unused hardlink
			# is automatically found by _find_unused_preserved_libs, since the
			# soname symlink no longer points to it. After the hardlink is
			# removed by _remove_preserved_libs, it calls pruneNonExisting
			# which eliminates the irrelevant symlink from the registry here.
			for f, target in symlinks.items():
				if abssymlink(f, target=target) in hardlinks:
					paths.append(f)

			# Keep the record only if anything survived.
			if len(paths) > 0:
				self._data[cps] = (cpv, counter, paths)
			else:
				del self._data[cps]
Esempio n. 50
0
    def pruneNonExisting(self):
        """Remove all records for objects that no longer exist on the filesystem."""

        os = _os_merge

        for cps in list(self._data):
            cpv, counter, _paths = self._data[cps]

            surviving = []
            regular_files = set()
            link_targets = {}
            for rel_path in _paths:
                abs_path = os.path.join(self._root, rel_path.lstrip(os.sep))
                try:
                    entry_st = os.lstat(abs_path)
                except OSError:
                    # Gone from the filesystem; drop the record entry.
                    continue
                if stat.S_ISLNK(entry_st.st_mode):
                    try:
                        link_targets[rel_path] = os.readlink(abs_path)
                    except OSError:
                        continue
                elif stat.S_ISREG(entry_st.st_mode):
                    regular_files.add(rel_path)
                    surviving.append(rel_path)

            # Only count symlinks as preserved if they still point to a hardink
            # in the same directory, in order to handle cases where a tool such
            # as eselect-opengl has updated the symlink to point to a hardlink
            # in a different directory (see bug #406837). The unused hardlink
            # is automatically found by _find_unused_preserved_libs, since the
            # soname symlink no longer points to it. After the hardlink is
            # removed by _remove_preserved_libs, it calls pruneNonExisting
            # which eliminates the irrelevant symlink from the registry here.
            for rel_path, target in link_targets.items():
                if abssymlink(rel_path, target=target) in regular_files:
                    surviving.append(rel_path)

            if surviving:
                self._data[cps] = (cpv, counter, surviving)
            else:
                del self._data[cps]
Esempio n. 51
0
	def testBashSyntax(self):
		"""Run ``bash -n`` on every bash script under the portage bin
		(and misc) trees and assert that each parses cleanly."""
		locations = [PORTAGE_BIN_PATH]
		misc_dir = os.path.join(PORTAGE_BASE_PATH, "misc")
		if os.path.isdir(misc_dir):
			locations.append(misc_dir)
		for parent, dirs, files in \
			chain.from_iterable(os.walk(x) for x in locations):
			parent = _unicode_decode(parent,
				encoding=_encodings['fs'], errors='strict')
			for x in files:
				x = _unicode_decode(x,
					encoding=_encodings['fs'], errors='strict')
				# split('.')[-1] yields the extension WITHOUT a leading
				# dot, so compare bare names; the previous '.py' forms
				# never matched and python files were needlessly scanned.
				ext = x.split('.')[-1]
				if ext in ('py', 'pyc', 'pyo'):
					continue
				x = os.path.join(parent, x)
				st = os.lstat(x)
				if not stat.S_ISREG(st.st_mode):
					continue

				# Check for bash shebang; the context manager closes
				# the file even if decoding raises.
				with open(_unicode_encode(x,
					encoding=_encodings['fs'], errors='strict'), 'rb') as f:
					line = _unicode_decode(f.readline(),
						encoding=_encodings['content'], errors='replace')
				if line[:2] == '#!' and \
					'bash' in line:
					cmd = [BASH_BINARY, "-n", x]
					if sys.hexversion < 0x3000000 or sys.hexversion >= 0x3020000:
						# Python 3.1 does not support bytes in Popen args.
						# Use a distinct loop variable (old code shadowed x).
						cmd = [_unicode_encode(arg,
							encoding=_encodings['fs'], errors='strict') for arg in cmd]
					proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
						stderr=subprocess.STDOUT)
					output = _unicode_decode(proc.communicate()[0],
						encoding=_encodings['fs'])
					status = proc.wait()
					self.assertEqual(os.WIFEXITED(status) and \
						os.WEXITSTATUS(status) == os.EX_OK, True, msg=output)
 def testCompileModules(self):
     """Byte-compile every python file (by extension or shebang) under
     the portage bin and pym trees to catch syntax errors."""
     for parent, dirs, files in itertools.chain(os.walk(PORTAGE_BIN_PATH), os.walk(PORTAGE_PYM_PATH)):
         parent = _unicode_decode(parent, encoding=_encodings["fs"], errors="strict")
         for x in files:
             x = _unicode_decode(x, encoding=_encodings["fs"], errors="strict")
             # Skip byte-compiled artifacts.
             if x[-4:] in (".pyc", ".pyo"):
                 continue
             x = os.path.join(parent, x)
             st = os.lstat(x)
             if not stat.S_ISREG(st.st_mode):
                 continue
             do_compile = False
             if x[-3:] == ".py":
                 do_compile = True
             else:
                 # Check for python shebang; the context manager closes
                 # the file even if readline/decoding raises (the old
                 # code leaked the handle on error).
                 with open(_unicode_encode(x, encoding=_encodings["fs"], errors="strict"), "rb") as f:
                     line = _unicode_decode(f.readline(), encoding=_encodings["content"], errors="replace")
                 if line[:2] == "#!" and "python" in line:
                     do_compile = True
             if do_compile:
                 # doraise=True turns syntax errors into exceptions that
                 # fail this test with a traceback.
                 py_compile.compile(x, cfile="/dev/null", doraise=True)
Esempio n. 53
0
	def testCompileModules(self):
		"""Compile every python file (by extension or shebang) under the
		portage bin and pym trees to catch syntax errors."""
		for parent, _dirs, files in itertools.chain(
			os.walk(PORTAGE_BIN_PATH),
			os.walk(PORTAGE_PYM_PATH)):
			parent = _unicode_decode(parent,
				encoding=_encodings['fs'], errors='strict')
			for x in files:
				x = _unicode_decode(x,
					encoding=_encodings['fs'], errors='strict')
				# Skip byte-compiled artifacts.
				if x[-4:] in ('.pyc', '.pyo'):
					continue
				x = os.path.join(parent, x)
				st = os.lstat(x)
				if not stat.S_ISREG(st.st_mode):
					continue
				do_compile = False
				if x[-3:] == '.py':
					do_compile = True
				else:
					# Check for python shebang.
					try:
						with open(_unicode_encode(x,
							encoding=_encodings['fs'], errors='strict'), 'rb') as f:
							line = _unicode_decode(f.readline(),
								encoding=_encodings['content'], errors='replace')
					except IOError as e:
						# Some tests create files that are unreadable by the
						# user (by design), so ignore EACCES issues.
						if e.errno != errno.EACCES:
							raise
						continue
					if line[:2] == '#!' and 'python' in line:
						do_compile = True
				if do_compile:
					# compile() raises SyntaxError on failure, which
					# fails this test with a useful traceback.
					with open(_unicode_encode(x,
						encoding=_encodings['fs'], errors='strict'), 'rb') as f:
						compile(f.read(), x, 'exec')
Esempio n. 54
0
def _prepare_features_dirs(mysettings):
	"""Ensure the directories backing enabled FEATURES (ccache, distcc)
	exist with portage-group ownership and suitable group permissions.

	For each feature listed in ``features_dirs`` that is enabled in
	``mysettings.features``, the base directory (taken from the
	feature's environment variable, falling back to a default under the
	build tree) and any required subdirectories are created.  Group and
	mode bits are applied recursively when the directory was just
	created, when the feature requests ``always_recurse``, or when
	FEATURES=userpriv requires an ownership/permission fix.  On any
	failure the feature is disabled (removed from
	``mysettings.features``) and a warning is emitted.

	@param mysettings: build-time configuration (portage config object)
	"""

	# Use default ABI libdir in accordance with bug #355283.
	libdir = None
	default_abi = mysettings.get("DEFAULT_ABI")
	if default_abi:
		libdir = mysettings.get("LIBDIR_" + default_abi)
	if not libdir:
		libdir = "lib"
	# NOTE(review): libdir is computed above but not referenced later in
	# this function -- looks vestigial; confirm before removing.

	# Per-feature directory policy: which variable names the base dir,
	# the default location, required subdirs, and whether permissions
	# must be re-applied on every run rather than only at creation.
	features_dirs = {
		"ccache":{
			"basedir_var":"CCACHE_DIR",
			"default_dir":os.path.join(mysettings["PORTAGE_TMPDIR"], "ccache"),
			"always_recurse":False},
		"distcc":{
			"basedir_var":"DISTCC_DIR",
			"default_dir":os.path.join(mysettings["BUILD_PREFIX"], ".distcc"),
			"subdirs":("lock", "state"),
			"always_recurse":True}
	}
	# Setgid, group-writable dirs; group rw files.  modemask is passed
	# as dirmask/filemask to apply_recursive_permissions below.
	dirmode  = 0o2070
	filemode =   0o60
	modemask =    0o2
	restrict = mysettings.get("PORTAGE_RESTRICT","").split()
	# userpriv fixes only matter when portage can actually drop
	# privileges (secpass >= 2) and userpriv is enabled and unrestricted.
	droppriv = secpass >= 2 and \
		"userpriv" in mysettings.features and \
		"userpriv" not in restrict
	for myfeature, kwargs in features_dirs.items():
		if myfeature in mysettings.features:
			failure = False
			basedir = mysettings.get(kwargs["basedir_var"])
			if basedir is None or not basedir.strip():
				# Fall back to the default location and export it so
				# the build environment sees the chosen path.
				basedir = kwargs["default_dir"]
				mysettings[kwargs["basedir_var"]] = basedir
			try:
				mydirs = [mysettings[kwargs["basedir_var"]]]
				if "subdirs" in kwargs:
					for subdir in kwargs["subdirs"]:
						mydirs.append(os.path.join(basedir, subdir))
				for mydir in mydirs:
					# ensure_dirs returns True when it had to create
					# or modify the directory.
					modified = ensure_dirs(mydir)
					# Generally, we only want to apply permissions for
					# initial creation.  Otherwise, we don't know exactly what
					# permissions the user wants, so should leave them as-is.
					droppriv_fix = False
					if droppriv:
						st = os.stat(mydir)
						if st.st_gid != portage_gid or \
							not dirmode == (stat.S_IMODE(st.st_mode) & dirmode):
							droppriv_fix = True
						if not droppriv_fix:
							# Check permissions of files in the directory.
							for filename in os.listdir(mydir):
								try:
									subdir_st = os.lstat(
										os.path.join(mydir, filename))
								except OSError:
									# Entry vanished or is unreadable;
									# skip it rather than fail.
									continue
								if subdir_st.st_gid != portage_gid or \
									((stat.S_ISDIR(subdir_st.st_mode) and \
									not dirmode == (stat.S_IMODE(subdir_st.st_mode) & dirmode))):
									droppriv_fix = True
									break

					if droppriv_fix:
						_adjust_perms_msg(mysettings,
							colorize("WARN", " * ") + \
							_("Adjusting permissions "
							"for FEATURES=userpriv: '%s'\n") % mydir)
					elif modified:
						_adjust_perms_msg(mysettings,
							colorize("WARN", " * ") + \
							_("Adjusting permissions "
							"for FEATURES=%s: '%s'\n") % (myfeature, mydir))

					if modified or kwargs["always_recurse"] or droppriv_fix:
						def onerror(e):
							raise	# The feature is disabled if a single error
									# occurs during permissions adjustment.
						if not apply_recursive_permissions(mydir,
						gid=portage_gid, dirmode=dirmode, dirmask=modemask,
						filemode=filemode, filemask=modemask, onerror=onerror):
							raise OperationNotPermitted(
								_("Failed to apply recursive permissions for the portage group."))

			except DirectoryNotFound as e:
				failure = True
				writemsg(_("\n!!! Directory does not exist: '%s'\n") % \
					(e,), noiselevel=-1)
				writemsg(_("!!! Disabled FEATURES='%s'\n") % myfeature,
					noiselevel=-1)

			except PortageException as e:
				failure = True
				writemsg("\n!!! %s\n" % str(e), noiselevel=-1)
				writemsg(_("!!! Failed resetting perms on %s='%s'\n") % \
					(kwargs["basedir_var"], basedir), noiselevel=-1)
				writemsg(_("!!! Disabled FEATURES='%s'\n") % myfeature,
					noiselevel=-1)

			if failure:
				# Disable the feature and pause briefly so the warning
				# is noticeable in build output.
				mysettings.features.remove(myfeature)
				time.sleep(5)