def testCompileModules(self):
		for parent, dirs, files in itertools.chain(
			os.walk(PORTAGE_BIN_PATH),
			os.walk(PORTAGE_PYM_PATH)):
			parent = _unicode_decode(parent,
				encoding=_encodings['fs'], errors='strict')
			for x in files:
				x = _unicode_decode(x,
					encoding=_encodings['fs'], errors='strict')
				if x[-4:] in ('.pyc', '.pyo'):
					continue
				x = os.path.join(parent, x)
				st = os.lstat(x)
				if not stat.S_ISREG(st.st_mode):
					continue
				do_compile = False
				if x[-3:] == '.py':
					do_compile = True
				else:
					# Check for python shebang
					with open(_unicode_encode(x,
						encoding=_encodings['fs'], errors='strict'), 'rb') as f:
						line = _unicode_decode(f.readline(),
							encoding=_encodings['content'], errors='replace')
					if line[:2] == '#!' and 'python' in line:
						do_compile = True
				if do_compile:
					with open(_unicode_encode(x,
						encoding=_encodings['fs'], errors='strict'), 'rb') as f:
						compile(f.read(), x, 'exec')
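
The loop above syntax-checks each source with the built-in compile(), which parses the code and raises SyntaxError without ever executing the module. A minimal self-contained sketch of that behavior:

good_src = "x = 1\n"
bad_src = "def broken(:\n"
compile(good_src, "<good>", "exec")  # returns a code object; nothing runs
try:
    compile(bad_src, "<bad>", "exec")
except SyntaxError as e:
    print("caught:", e.msg)
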
	def testCompileModules(self):
		for parent, dirs, files in itertools.chain(
			os.walk(PORTAGE_BIN_PATH),
			os.walk(PORTAGE_PYM_PATH)):
			parent = _unicode_decode(parent,
				encoding=_encodings['fs'], errors='strict')
			for x in files:
				x = _unicode_decode(x,
					encoding=_encodings['fs'], errors='strict')
				if x[-4:] in ('.pyc', '.pyo'):
					continue
				x = os.path.join(parent, x)
				st = os.lstat(x)
				if not stat.S_ISREG(st.st_mode):
					continue
				do_compile = False
				cfile = x
				if x[-3:] == '.py':
					do_compile = True
				else:
					# Check for python shebang
					f = open(_unicode_encode(x,
						encoding=_encodings['fs'], errors='strict'), 'rb')
					line = _unicode_decode(f.readline(),
						encoding=_encodings['content'], errors='replace')
					f.close()
					if line[:2] == '#!' and \
						'python' in line:
						do_compile = True
						cfile += '.py'
				if do_compile:
					cfile += (__debug__ and 'c' or 'o')
					py_compile.compile(x, cfile=cfile, doraise=True)
	def testCompileModules(self):
		iters = [os.walk(os.path.join(PORTAGE_PYM_PATH, x))
			for x in PORTAGE_PYM_PACKAGES]
		iters.append(os.walk(PORTAGE_BIN_PATH))

		for parent, _dirs, files in itertools.chain(*iters):
			parent = _unicode_decode(parent,
				encoding=_encodings['fs'], errors='strict')
			for x in files:
				x = _unicode_decode(x,
					encoding=_encodings['fs'], errors='strict')
				if x[-4:] in ('.pyc', '.pyo'):
					continue
				x = os.path.join(parent, x)
				st = os.lstat(x)
				if not stat.S_ISREG(st.st_mode):
					continue

				bin_path = os.path.relpath(x, PORTAGE_BIN_PATH)
				mod_path = os.path.relpath(x, PORTAGE_PYM_PATH)

				meta = module_metadata.get(mod_path) or script_metadata.get(bin_path)
				if meta:
					req_py = tuple(int(x) for x
							in meta.get('required_python', '0.0').split('.'))
					if sys.version_info < req_py:
						continue

				do_compile = False
				if x[-3:] == '.py':
					do_compile = True
				else:
					# Check for python shebang.
					try:
						with open(_unicode_encode(x,
							encoding=_encodings['fs'], errors='strict'), 'rb') as f:
							line = _unicode_decode(f.readline(),
								encoding=_encodings['content'], errors='replace')
					except IOError as e:
						# Some tests create files that are unreadable by the
						# user (by design), so ignore EACCES issues.
						if e.errno != errno.EACCES:
							raise
						continue
					if line[:2] == '#!' and 'python' in line:
						do_compile = True
				if do_compile:
					with open(_unicode_encode(x,
						encoding=_encodings['fs'], errors='strict'), 'rb') as f:
						compile(f.read(), x, 'exec')
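
The metadata lookup above skips sources that need a newer interpreter than the one running the test. A minimal sketch of the same gate, with a hypothetical module_metadata entry (the real module_metadata and script_metadata tables live in the test module):

import sys

module_metadata = {
    # Hypothetical entry: path relative to PORTAGE_PYM_PATH.
    "portage/util/futures/compat_coroutine.py": {"required_python": "3.4"},
}

def required_python_ok(mod_path):
    meta = module_metadata.get(mod_path)
    if not meta:
        return True
    req_py = tuple(int(x) for x in meta.get("required_python", "0.0").split("."))
    return sys.version_info >= req_py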
Example #4
def addtolist(mylist, curdir):
    """(list, dir) --- Takes an array(list) and appends all files from dir down
    the directory tree. Returns nothing. list is modified."""
    curdir = normalize_path(
        _unicode_decode(curdir, encoding=_encodings['fs'], errors='strict'))
    for parent, dirs, files in os.walk(curdir):

        parent = _unicode_decode(parent,
                                 encoding=_encodings['fs'],
                                 errors='strict')
        if parent != curdir:
            mylist.append(parent[len(curdir) + 1:] + os.sep)

        # Iterate over a copy, since removing entries from the list being
        # iterated would skip elements.
        for x in list(dirs):
            try:
                _unicode_decode(x, encoding=_encodings['fs'], errors='strict')
            except UnicodeDecodeError:
                dirs.remove(x)

        for x in files:
            try:
                x = _unicode_decode(x,
                                    encoding=_encodings['fs'],
                                    errors='strict')
            except UnicodeDecodeError:
                continue
            mylist.append(os.path.join(parent, x)[len(curdir) + 1:])
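
A short usage sketch for addtolist(): entries are appended relative to curdir, and directories carry a trailing os.sep (the directory path below is hypothetical):

mylist = []
addtolist(mylist, "/tmp/example")
# mylist might now contain entries such as:
#   "sub/"          (a subdirectory, with trailing os.sep)
#   "sub/data.txt"  (a file, relative to /tmp/example)
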
	def testBashSyntax(self):
		for parent, dirs, files in os.walk(PORTAGE_BIN_PATH):
			parent = _unicode_decode(parent,
				encoding=_encodings['fs'], errors='strict')
			for x in files:
				x = _unicode_decode(x,
					encoding=_encodings['fs'], errors='strict')
				ext = x.split('.')[-1]
				if ext in ('py', 'pyc', 'pyo'):
					continue
				x = os.path.join(parent, x)
				st = os.lstat(x)
				if not stat.S_ISREG(st.st_mode):
					continue

				# Check for bash shebang
				f = open(_unicode_encode(x,
					encoding=_encodings['fs'], errors='strict'), 'rb')
				line = _unicode_decode(f.readline(),
					encoding=_encodings['content'], errors='replace')
				f.close()
				if line[:2] == '#!' and \
					'bash' in line:
					cmd = "%s -n %s" % (_shell_quote(BASH_BINARY), _shell_quote(x))
					status, output = subprocess_getstatusoutput(cmd)
					self.assertEqual(os.WIFEXITED(status) and \
						os.WEXITSTATUS(status) == os.EX_OK, True, msg=output)
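
The -n flag passed to bash above makes it parse the script and exit without executing anything, so only syntax is checked. A minimal sketch of the same check using the subprocess module (the script path is hypothetical):

import subprocess

proc = subprocess.run(["bash", "-n", "myscript.sh"],
                      stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
if proc.returncode != 0:
    print(proc.stdout.decode(errors="replace"))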
Example #6
def main():

	TEST_FILE = _unicode_encode('__test__',
		encoding=_encodings['fs'], errors='strict')
	svn_dirname = _unicode_encode('.svn',
		encoding=_encodings['fs'], errors='strict')
	suite = unittest.TestSuite()
	basedir = os.path.dirname(os.path.realpath(__file__))
	testDirs = []

	# the os.walk help mentions relative paths as being quirky
	# I was tired of adding dirs to the list, so now we add __test__
	# to each dir we want tested.
	for root, dirs, files in os.walk(basedir):
		if svn_dirname in dirs:
			dirs.remove(svn_dirname)
		try:
			root = _unicode_decode(root,
				encoding=_encodings['fs'], errors='strict')
		except UnicodeDecodeError:
			continue

		if TEST_FILE in files:
			testDirs.append(root)

	for mydir in testDirs:
		suite.addTests(getTests(os.path.join(basedir, mydir), basedir) )
	return TextTestRunner(verbosity=2).run(suite)
Example #7
def main():

    TEST_FILE = _unicode_encode('__test__',
                                encoding=_encodings['fs'],
                                errors='strict')
    svn_dirname = _unicode_encode('.svn',
                                  encoding=_encodings['fs'],
                                  errors='strict')
    suite = unittest.TestSuite()
    basedir = os.path.dirname(os.path.realpath(__file__))
    testDirs = []

    if len(sys.argv) > 1:
        suite.addTests(getTestFromCommandLine(sys.argv[1:], basedir))
        return TextTestRunner(verbosity=2).run(suite)

    # the os.walk help mentions relative paths as being quirky
    # I was tired of adding dirs to the list, so now we add __test__
    # to each dir we want tested.
    for root, dirs, files in os.walk(basedir):
        if svn_dirname in dirs:
            dirs.remove(svn_dirname)
        try:
            root = _unicode_decode(root,
                                   encoding=_encodings['fs'],
                                   errors='strict')
        except UnicodeDecodeError:
            continue

        if TEST_FILE in files:
            testDirs.append(root)

    for mydir in testDirs:
        suite.addTests(getTests(os.path.join(basedir, mydir), basedir))
    return TextTestRunner(verbosity=2).run(suite)
Example #8
	def multiBuilder(self, options, settings, trees):
		rValue = {}
		directory = options.get("directory",
			os.path.join(settings["PORTAGE_CONFIGROOT"],
			USER_CONFIG_PATH, "sets"))
		name_pattern = options.get("name_pattern", "${name}")
		if not "$name" in name_pattern and not "${name}" in name_pattern:
			raise SetConfigError(_("name_pattern doesn't include ${name} placeholder"))
		greedy = get_boolean(options, "greedy", False)
		# look for repository path variables
		match = self._repopath_match.match(directory)
		if match:
			try:
				directory = self._repopath_sub.sub(trees["porttree"].dbapi.treemap[match.groupdict()["reponame"]], directory)
			except KeyError:
				raise SetConfigError(_("Could not find repository '%s'") % match.groupdict()["reponame"])

		try:
			directory = _unicode_decode(directory,
				encoding=_encodings['fs'], errors='strict')
			# Now verify that we can also encode it.
			_unicode_encode(directory,
				encoding=_encodings['fs'], errors='strict')
		except UnicodeError:
			directory = _unicode_decode(directory,
				encoding=_encodings['fs'], errors='replace')
			raise SetConfigError(
				_("Directory path contains invalid character(s) for encoding '%s': '%s'") \
				% (_encodings['fs'], directory))

		if os.path.isdir(directory):
			directory = normalize_path(directory)

			for parent, dirs, files in os.walk(directory):
				try:
					parent = _unicode_decode(parent,
						encoding=_encodings['fs'], errors='strict')
				except UnicodeDecodeError:
					continue
				for d in dirs[:]:
					if d[:1] == '.':
						dirs.remove(d)
				for filename in files:
					try:
						filename = _unicode_decode(filename,
							encoding=_encodings['fs'], errors='strict')
					except UnicodeDecodeError:
						continue
					if filename[:1] == '.':
						continue
					if filename.endswith(".metadata"):
						continue
					filename = os.path.join(parent,
						filename)[1 + len(directory):]
					myname = name_pattern.replace("$name", filename)
					myname = myname.replace("${name}", filename)
					rValue[myname] = StaticFileSet(
						os.path.join(directory, filename),
						greedy=greedy, dbapi=trees["vartree"].dbapi)
		return rValue
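
The name_pattern handling above accepts either $name or ${name} as the placeholder. A worked sketch of the substitution, with a hypothetical pattern and file name:

name_pattern = "user_${name}"   # hypothetical pattern from the set config
filename = "world"              # path relative to the sets directory
myname = name_pattern.replace("$name", filename)
myname = myname.replace("${name}", filename)
# myname == "user_world"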
Example #9
def RecursiveFileLoader(filename):
    """
	If filename is of type file, return a generate that yields filename
	else if filename is of type directory, return a generator that fields
	files in that directory.
	
	Ignore files beginning with . or ending in ~.
	Prune CVS directories.

	@param filename: name of a file/directory to traverse
	@rtype: list
	@returns: List of files to process
	"""

    try:
        st = os.stat(filename)
    except OSError:
        return
    if stat.S_ISDIR(st.st_mode):
        for root, dirs, files in os.walk(filename):
            for d in list(dirs):
                if d[:1] == '.' or d == 'CVS':
                    dirs.remove(d)
            for f in files:
                try:
                    f = _unicode_decode(f,
                                        encoding=_encodings['fs'],
                                        errors='strict')
                except UnicodeDecodeError:
                    continue
                if f[:1] == '.' or f[-1:] == '~':
                    continue
                yield os.path.join(root, f)
    else:
        yield filename
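
A usage sketch: the generator works uniformly for a single file and for a directory tree (the path is hypothetical):

for path in RecursiveFileLoader("/etc/portage/package.mask"):
    print(path)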
Example #10
    def testBashSyntax(self):
        for parent, dirs, files in os.walk(PORTAGE_BIN_PATH):
            parent = _unicode_decode(parent,
                                     encoding=_encodings['fs'],
                                     errors='strict')
            for x in files:
                x = _unicode_decode(x,
                                    encoding=_encodings['fs'],
                                    errors='strict')
                ext = x.split('.')[-1]
                if ext in ('py', 'pyc', 'pyo'):
                    continue
                x = os.path.join(parent, x)
                st = os.lstat(x)
                if not stat.S_ISREG(st.st_mode):
                    continue

                # Check for bash shebang
                f = open(
                    _unicode_encode(x,
                                    encoding=_encodings['fs'],
                                    errors='strict'), 'rb')
                line = _unicode_decode(f.readline(),
                                       encoding=_encodings['content'],
                                       errors='replace')
                f.close()
                if line[:2] == '#!' and \
                 'bash' in line:
                    cmd = "%s -n %s" % (_shell_quote(BASH_BINARY),
                                        _shell_quote(x))
                    status, output = subprocess_getstatusoutput(cmd)
                    self.assertEqual(os.WIFEXITED(status) and \
                     os.WEXITSTATUS(status) == os.EX_OK, True, msg=output)
Example #12
def RecursiveFileLoader(filename):
    """
	If filename is of type file, return a generate that yields filename
	else if filename is of type directory, return a generator that fields
	files in that directory.
	
	Ignore files beginning with . or ending in ~.
	Prune CVS directories.

	@param filename: name of a file/directory to traverse
	@rtype: list
	@returns: List of files to process
	"""

    try:
        st = os.stat(filename)
    except OSError:
        return
    if stat.S_ISDIR(st.st_mode):
        for root, dirs, files in os.walk(filename):
            for d in list(dirs):
                if d[:1] == "." or d == "CVS":
                    dirs.remove(d)
            for f in files:
                try:
                    f = _unicode_decode(f, encoding=_encodings["fs"], errors="strict")
                except UnicodeDecodeError:
                    continue
                if f[:1] == "." or f[-1:] == "~":
                    continue
                yield os.path.join(root, f)
    else:
        yield filename
Example #13
def addtolist(mylist, curdir):
	"""(list, dir) --- Takes an array(list) and appends all files from dir down
	the directory tree. Returns nothing. list is modified."""
	curdir = normalize_path(_unicode_decode(curdir,
		encoding=_encodings['fs'], errors='strict'))
	for parent, dirs, files in os.walk(curdir):

		parent = _unicode_decode(parent,
			encoding=_encodings['fs'], errors='strict')
		if parent != curdir:
			mylist.append(parent[len(curdir) + 1:] + os.sep)

		# Iterate over a copy, since removing entries from the list being
		# iterated would skip elements.
		for x in list(dirs):
			try:
				_unicode_decode(x, encoding=_encodings['fs'], errors='strict')
			except UnicodeDecodeError:
				dirs.remove(x)

		for x in files:
			try:
				x = _unicode_decode(x,
					encoding=_encodings['fs'], errors='strict')
			except UnicodeDecodeError:
				continue
			mylist.append(os.path.join(parent, x)[len(curdir) + 1:])
Example #14
    def _update_thick_pkgdir(self, cat, pn, pkgdir):
        cpvlist = []
        for pkgdir, pkgdir_dirs, pkgdir_files in os.walk(pkgdir):
            break
        for f in pkgdir_files:
            try:
                f = _unicode_decode(f,
                                    encoding=_encodings['fs'],
                                    errors='strict')
            except UnicodeDecodeError:
                continue
            if f[:1] == ".":
                continue
            pf = self._is_cpv(cat, pn, f)
            if pf is not None:
                mytype = "EBUILD"
                cpvlist.append(pf)
            elif self._find_invalid_path_char(f) == -1 and \
             manifest2MiscfileFilter(f):
                mytype = "MISC"
            else:
                continue
            self.fhashdict[mytype][f] = perform_multiple_checksums(
                self.pkgdir + f, self.hashes)
        recursive_files = []

        pkgdir = self.pkgdir
        cut_len = len(os.path.join(pkgdir, "files") + os.sep)
        for parentdir, dirs, files in os.walk(os.path.join(pkgdir, "files")):
            for f in files:
                try:
                    f = _unicode_decode(f,
                                        encoding=_encodings['fs'],
                                        errors='strict')
                except UnicodeDecodeError:
                    continue
                full_path = os.path.join(parentdir, f)
                recursive_files.append(full_path[cut_len:])
        for f in recursive_files:
            if self._find_invalid_path_char(f) != -1 or \
             not manifest2AuxfileFilter(f):
                continue
            self.fhashdict["AUX"][f] = perform_multiple_checksums(
                os.path.join(self.pkgdir, "files", f.lstrip(os.sep)),
                self.hashes)
        return cpvlist
Example #15
    def _update_thick_pkgdir(self, cat, pn, pkgdir):
        _, _, pkgdir_files = next(os.walk(pkgdir), (None, None, None))
        cpvlist = []
        for f in pkgdir_files:
            try:
                f = _unicode_decode(f,
                                    encoding=_encodings["fs"],
                                    errors="strict")
            except UnicodeDecodeError:
                continue
            if f.startswith("."):
                continue
            pf = self._is_cpv(cat, pn, f)
            if pf is not None:
                mytype = "EBUILD"
                cpvlist.append(pf)
            elif self._find_invalid_path_char(
                    f) == -1 and manifest2MiscfileFilter(f):
                mytype = "MISC"
            else:
                continue
            self.fhashdict[mytype][f] = perform_multiple_checksums(
                f"{self.pkgdir}{f}", self.hashes)
        recursive_files = []

        pkgdir = self.pkgdir
        cut_len = len(os.path.join(pkgdir, f"files{os.sep}"))
        for parentdir, dirs, files in os.walk(os.path.join(pkgdir, "files")):
            for f in files:
                try:
                    f = _unicode_decode(f,
                                        encoding=_encodings["fs"],
                                        errors="strict")
                except UnicodeDecodeError:
                    continue
                full_path = os.path.join(parentdir, f)
                recursive_files.append(full_path[cut_len:])
        for f in recursive_files:
            if self._find_invalid_path_char(
                    f) != -1 or not manifest2AuxfileFilter(f):
                continue
            self.fhashdict["AUX"][f] = perform_multiple_checksums(
                os.path.join(self.pkgdir, "files", f.lstrip(os.sep)),
                self.hashes)
        return cpvlist
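
The next(os.walk(pkgdir), (None, None, None)) call above is an idiom for reading only the top-level entries of a directory. It is the generator form of the loop-and-break pattern used in the older examples; a sketch of both:

import os

# Generator form: take the first (dirpath, dirnames, filenames) tuple.
dirpath, dirnames, filenames = next(os.walk("/tmp"), (None, None, None))

# Older loop-and-break form with the same effect.
for dirpath, dirnames, filenames in os.walk("/tmp"):
    break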
Example #16
    def _apply_max_mtime(self, preserved_stats, entries):
        """
        Set the Manifest mtime to the max mtime of all relevant files
        and directories. Directory mtimes account for file renames and
        removals. The existing Manifest mtime accounts for eclass
        modifications that change DIST entries. This results in a
        stable/predictable mtime, which is useful when converting thin
        manifests to thick manifests for distribution via rsync. For
        portability, the mtime is set with 1 second resolution.

        @param preserved_stats: maps paths to preserved stat results
            that should be used instead of os.stat() calls
        @type preserved_stats: dict
        @param entries: list of current Manifest2Entry instances
        @type entries: list
        """
        # Use stat_result[stat.ST_MTIME] for 1 second resolution, since
        # it always rounds down. Note that stat_result.st_mtime will round
        # up from 0.999999999 to 1.0 when precision is lost during conversion
        # from nanosecond resolution to float.
        max_mtime = None
        _update_max = (lambda st: max_mtime if max_mtime is not None and
                       max_mtime > st[stat.ST_MTIME] else st[stat.ST_MTIME])
        _stat = (lambda path: preserved_stats[path]
                 if path in preserved_stats else os.stat(path))

        for stat_result in preserved_stats.values():
            max_mtime = _update_max(stat_result)

        for entry in entries:
            if entry.type == 'DIST':
                continue
            abs_path = (os.path.join(self.pkgdir, 'files', entry.name)
                        if entry.type == 'AUX' else os.path.join(
                            self.pkgdir, entry.name))
            max_mtime = _update_max(_stat(abs_path))

        if not self.thin:
            # Account for changes to all relevant nested directories.
            # This is not necessary for thin manifests because
            # self.pkgdir is already included via preserved_stats.
            for parent_dir, dirs, files in os.walk(self.pkgdir.rstrip(os.sep)):
                try:
                    parent_dir = _unicode_decode(parent_dir,
                                                 encoding=_encodings['fs'],
                                                 errors='strict')
                except UnicodeDecodeError:
                    # If an absolute path cannot be decoded, then it is
                    # always excluded from the manifest (repoman will
                    # report such problems).
                    pass
                else:
                    max_mtime = _update_max(_stat(parent_dir))

        if max_mtime is not None:
            for path in preserved_stats:
                os.utime(path, (max_mtime, max_mtime))
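
The comment above about mtime rounding can be demonstrated directly: stat_result.st_mtime is a float that may round up when nanoseconds are converted, while indexing with stat.ST_MTIME always yields the integer seconds, rounded down. A sketch:

import os
import stat

st = os.stat(".")
print(st.st_mtime)        # float; may round up, e.g. 1700000000.9999999 -> 1700000001.0
print(st[stat.ST_MTIME])  # int; always rounded down, e.g. 1700000000
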
	def testCompileModules(self):
		for parent, dirs, files in os.walk(PORTAGE_PYM_PATH):
			parent = _unicode_decode(parent,
				encoding=_encodings['fs'], errors='strict')
			for x in files:
				x = _unicode_decode(x,
					encoding=_encodings['fs'], errors='strict')
				if x[-3:] == '.py':
					py_compile.compile(os.path.join(parent, x), doraise=True)
Example #18
	def get_filenames(self, distdir):
		for dirpath, dirnames, filenames in os.walk(distdir,
				onerror=_raise_exc):
			for filename in filenames:
				try:
					yield portage._unicode_decode(filename, errors='strict')
				except UnicodeDecodeError:
					# Ignore it. Distfiles names must have valid UTF8 encoding.
					pass
			return
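
os.walk suppresses the OSError from an unreadable or missing directory unless an onerror callback is supplied; the onerror=_raise_exc argument above turns such errors into exceptions. A sketch of what that callback amounts to:

import os

def _raise_exc(e):
    # os.walk passes in the OSError it would otherwise swallow.
    raise e

for dirpath, dirnames, filenames in os.walk("/no/such/dir", onerror=_raise_exc):
    pass  # raises FileNotFoundError instead of silently yielding nothing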
Example #19
	def _prune_empty_dirs(self):
		all_dirs = []
		for parent, dirs, files in os.walk(self.location):
			for x in dirs:
				all_dirs.append(_os.path.join(parent, x))
		while all_dirs:
			try:
				_os.rmdir(all_dirs.pop())
			except OSError:
				pass
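
_prune_empty_dirs collects directories during a top-down walk and then calls rmdir starting from the end of the list, so children are always attempted before their parents; non-empty directories simply fail the rmdir and survive. A bottom-up walk achieves the same effect, as in this sketch:

import os

def prune_empty_dirs(location):
    # topdown=False visits children before their parents, so an emptied
    # subtree collapses in a single pass.
    for parent, dirs, _files in os.walk(location, topdown=False):
        for x in dirs:
            try:
                os.rmdir(os.path.join(parent, x))
            except OSError:
                pass  # directory not empty, or already removed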
Example #20
	def _apply_max_mtime(self, preserved_stats, entries):
		"""
		Set the Manifest mtime to the max mtime of all relevant files
		and directories. Directory mtimes account for file renames and
		removals. The existing Manifest mtime accounts for eclass
		modifications that change DIST entries. This results in a
		stable/predictable mtime, which is useful when converting thin
		manifests to thick manifests for distribution via rsync. For
		portability, the mtime is set with 1 second resolution.

		@param preserved_stats: maps paths to preserved stat results
			that should be used instead of os.stat() calls
		@type preserved_stats: dict
		@param entries: list of current Manifest2Entry instances
		@type entries: list
		"""
		# Use stat_result[stat.ST_MTIME] for 1 second resolution, since
		# it always rounds down. Note that stat_result.st_mtime will round
		# up from 0.999999999 to 1.0 when precision is lost during conversion
		# from nanosecond resolution to float.
		max_mtime = None
		_update_max = (lambda st: max_mtime if max_mtime is not None
			and max_mtime > st[stat.ST_MTIME] else st[stat.ST_MTIME])
		_stat = (lambda path: preserved_stats[path] if path in preserved_stats
			else os.stat(path))

		for stat_result in preserved_stats.values():
			max_mtime = _update_max(stat_result)

		for entry in entries:
			if entry.type == 'DIST':
				continue
			abs_path = (os.path.join(self.pkgdir, 'files', entry.name) if
				entry.type == 'AUX' else os.path.join(self.pkgdir, entry.name))
			max_mtime = _update_max(_stat(abs_path))

		if not self.thin:
			# Account for changes to all relevant nested directories.
			# This is not necessary for thin manifests because
			# self.pkgdir is already included via preserved_stats.
			for parent_dir, dirs, files in os.walk(self.pkgdir.rstrip(os.sep)):
				try:
					parent_dir = _unicode_decode(parent_dir,
						encoding=_encodings['fs'], errors='strict')
				except UnicodeDecodeError:
					# If an absolute path cannot be decoded, then it is
					# always excluded from the manifest (repoman will
					# report such problems).
					pass
				else:
					max_mtime = _update_max(_stat(parent_dir))

		if max_mtime is not None:
			for path in preserved_stats:
				os.utime(path, (max_mtime, max_mtime))
Example #21
	def _update_thick_pkgdir(self, cat, pn, pkgdir):
		cpvlist = []
		for pkgdir, pkgdir_dirs, pkgdir_files in os.walk(pkgdir):
			break
		for f in pkgdir_files:
			try:
				f = _unicode_decode(f,
					encoding=_encodings['fs'], errors='strict')
			except UnicodeDecodeError:
				continue
			if f[:1] == ".":
				continue
			pf = self._is_cpv(cat, pn, f)
			if pf is not None:
				mytype = "EBUILD"
				cpvlist.append(pf)
			elif self._find_invalid_path_char(f) == -1 and \
				manifest2MiscfileFilter(f):
				mytype = "MISC"
			else:
				continue
			self.fhashdict[mytype][f] = perform_multiple_checksums(self.pkgdir+f, self.hashes)
		recursive_files = []

		pkgdir = self.pkgdir
		cut_len = len(os.path.join(pkgdir, "files") + os.sep)
		for parentdir, dirs, files in os.walk(os.path.join(pkgdir, "files")):
			for f in files:
				try:
					f = _unicode_decode(f,
						encoding=_encodings['fs'], errors='strict')
				except UnicodeDecodeError:
					continue
				full_path = os.path.join(parentdir, f)
				recursive_files.append(full_path[cut_len:])
		for f in recursive_files:
			if self._find_invalid_path_char(f) != -1 or \
				not manifest2AuxfileFilter(f):
				continue
			self.fhashdict["AUX"][f] = perform_multiple_checksums(
				os.path.join(self.pkgdir, "files", f.lstrip(os.sep)), self.hashes)
		return cpvlist
Example #22
    def _getfiles():
        for path, dirs, files in os.walk(os.path.join(global_config_path, "sets")):
            for f in files:
                if not f.startswith(b"."):
                    yield os.path.join(path, f)

        dbapi = trees["porttree"].dbapi
        for repo in dbapi.getRepositories():
            path = dbapi.getRepositoryPath(repo)
            yield os.path.join(path, "sets.conf")

        yield os.path.join(settings["PORTAGE_CONFIGROOT"], USER_CONFIG_PATH, "sets.conf")
Example #23
	def _getfiles():
		for path, dirs, files in os.walk(os.path.join(global_config_path, "sets")):
			for f in files:
				if not f.startswith(b'.'):
					yield os.path.join(path, f)

		dbapi = trees["porttree"].dbapi
		for repo in dbapi.getRepositories():
			path = dbapi.getRepositoryPath(repo)
			yield os.path.join(path, "sets.conf")

		yield os.path.join(settings["PORTAGE_CONFIGROOT"],
			USER_CONFIG_PATH, "sets.conf")
Example #24
def apply_recursive_permissions(top, uid=-1, gid=-1,
	dirmode=-1, dirmask=-1, filemode=-1, filemask=-1, onerror=None):
	"""A wrapper around apply_secpass_permissions that applies permissions
	recursively.  If optional argument onerror is specified, it should be a
	function; it will be called with one argument, a PortageException instance.
	Returns True if all permissions are applied and False if some are left
	unapplied."""

	# Avoid issues with circular symbolic links, as in bug #339670.
	follow_links = False

	if onerror is None:
		# Default behavior is to dump errors to stderr so they won't
		# go unnoticed.  Callers can pass in a quiet instance.
		def onerror(e):
			if isinstance(e, OperationNotPermitted):
				writemsg(_("Operation Not Permitted: %s\n") % str(e),
					noiselevel=-1)
			elif isinstance(e, FileNotFound):
				writemsg(_("File Not Found: '%s'\n") % str(e), noiselevel=-1)
			else:
				raise

	all_applied = True
	for dirpath, dirnames, filenames in os.walk(top):
		try:
			applied = apply_secpass_permissions(dirpath,
				uid=uid, gid=gid, mode=dirmode, mask=dirmask,
				follow_links=follow_links)
			if not applied:
				all_applied = False
		except PortageException as e:
			all_applied = False
			onerror(e)

		for name in filenames:
			try:
				applied = apply_secpass_permissions(os.path.join(dirpath, name),
					uid=uid, gid=gid, mode=filemode, mask=filemask,
					follow_links=follow_links)
				if not applied:
					all_applied = False
			except PortageException as e:
				# Ignore InvalidLocation exceptions such as FileNotFound
				# and DirectoryNotFound since sometimes things disappear,
				# like when adjusting permissions on DISTCC_DIR.
				if not isinstance(e, portage.exception.InvalidLocation):
					all_applied = False
					onerror(e)
	return all_applied
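
A usage sketch for apply_recursive_permissions(), collecting errors quietly instead of writing to stderr (the path and IDs are hypothetical):

errors = []
all_applied = apply_recursive_permissions(
    "/var/tmp/portage/build",   # hypothetical tree
    uid=250, gid=250,
    dirmode=0o755, dirmask=0,
    filemode=0o644, filemask=0,
    onerror=errors.append)      # quiet handler: just collect the exceptions
if not all_applied:
    print("%d permission errors" % len(errors))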
Example #25
    def testBashSyntax(self):
        locations = [PORTAGE_BIN_PATH]
        misc_dir = os.path.join(PORTAGE_BASE_PATH, "misc")
        if os.path.isdir(misc_dir):
            locations.append(misc_dir)
        for parent, dirs, files in chain.from_iterable(
                os.walk(x) for x in locations):
            parent = _unicode_decode(parent,
                                     encoding=_encodings["fs"],
                                     errors="strict")
            for x in files:
                x = _unicode_decode(x,
                                    encoding=_encodings["fs"],
                                    errors="strict")
                ext = x.split(".")[-1]
                if ext in ("py", "pyc", "pyo"):
                    continue
                x = os.path.join(parent, x)
                st = os.lstat(x)
                if not stat.S_ISREG(st.st_mode):
                    continue

                # Check for bash shebang
                f = open(
                    _unicode_encode(x,
                                    encoding=_encodings["fs"],
                                    errors="strict"), "rb")
                line = _unicode_decode(f.readline(),
                                       encoding=_encodings["content"],
                                       errors="replace")
                f.close()
                if line[:2] == "#!" and "bash" in line:
                    cmd = [BASH_BINARY, "-n", x]
                    cmd = [
                        _unicode_encode(x,
                                        encoding=_encodings["fs"],
                                        errors="strict") for x in cmd
                    ]
                    proc = subprocess.Popen(cmd,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.STDOUT)
                    output = _unicode_decode(proc.communicate()[0],
                                             encoding=_encodings["fs"])
                    status = proc.wait()
                    self.assertEqual(
                        os.WIFEXITED(status)
                        and os.WEXITSTATUS(status) == os.EX_OK,
                        True,
                        msg=output,
                    )
    def testCompileModules(self):
        for parent, dirs, files in itertools.chain(os.walk(PORTAGE_BIN_PATH), os.walk(PORTAGE_PYM_PATH)):
            parent = _unicode_decode(parent, encoding=_encodings["fs"], errors="strict")
            for x in files:
                x = _unicode_decode(x, encoding=_encodings["fs"], errors="strict")
                if x[-4:] in (".pyc", ".pyo"):
                    continue
                x = os.path.join(parent, x)
                st = os.lstat(x)
                if not stat.S_ISREG(st.st_mode):
                    continue
                do_compile = False
                if x[-3:] == ".py":
                    do_compile = True
                else:
                    # Check for python shebang
                    f = open(_unicode_encode(x, encoding=_encodings["fs"], errors="strict"), "rb")
                    line = _unicode_decode(f.readline(), encoding=_encodings["content"], errors="replace")
                    f.close()
                    if line[:2] == "#!" and "python" in line:
                        do_compile = True
                if do_compile:
                    py_compile.compile(x, cfile="/dev/null", doraise=True)
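
Passing cfile="/dev/null" above turns py_compile into a pure syntax check, but note that current Python 3 releases write the bytecode atomically (to a temporary name, then a rename over cfile), so a device node is not a safe target there. A safer sketch directs the output to a throwaway temporary file:

import os
import py_compile
import tempfile

def syntax_check(path):
    # Compile only to validate syntax; discard the bytecode afterwards.
    fd, cfile = tempfile.mkstemp(suffix=".pyc")
    os.close(fd)
    try:
        py_compile.compile(path, cfile=cfile, doraise=True)
    finally:
        os.unlink(cfile)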
Example #27
	def testCompileModules(self):
		for parent, _dirs, files in itertools.chain(
			os.walk(PORTAGE_BIN_PATH),
			os.walk(PORTAGE_PYM_PATH)):
			parent = _unicode_decode(parent,
				encoding=_encodings['fs'], errors='strict')
			for x in files:
				x = _unicode_decode(x,
					encoding=_encodings['fs'], errors='strict')
				if x[-4:] in ('.pyc', '.pyo'):
					continue
				x = os.path.join(parent, x)
				st = os.lstat(x)
				if not stat.S_ISREG(st.st_mode):
					continue
				do_compile = False
				if x[-3:] == '.py':
					do_compile = True
				else:
					# Check for python shebang.
					try:
						with open(_unicode_encode(x,
							encoding=_encodings['fs'], errors='strict'), 'rb') as f:
							line = _unicode_decode(f.readline(),
								encoding=_encodings['content'], errors='replace')
					except IOError as e:
						# Some tests create files that are unreadable by the
						# user (by design), so ignore EACCES issues.
						if e.errno != errno.EACCES:
							raise
						continue
					if line[:2] == '#!' and 'python' in line:
						do_compile = True
				if do_compile:
					with open(_unicode_encode(x,
						encoding=_encodings['fs'], errors='strict'), 'rb') as f:
						compile(f.read(), x, 'exec')
Example #28
    def testBashSyntax(self):
        locations = [PORTAGE_BIN_PATH]
        misc_dir = os.path.join(PORTAGE_BASE_PATH, "misc")
        if os.path.isdir(misc_dir):
            locations.append(misc_dir)
        for parent, dirs, files in \
         chain.from_iterable(os.walk(x) for x in locations):
            parent = _unicode_decode(parent,
                                     encoding=_encodings['fs'],
                                     errors='strict')
            for x in files:
                x = _unicode_decode(x,
                                    encoding=_encodings['fs'],
                                    errors='strict')
                ext = x.split('.')[-1]
                if ext in ('py', 'pyc', 'pyo'):
                    continue
                x = os.path.join(parent, x)
                st = os.lstat(x)
                if not stat.S_ISREG(st.st_mode):
                    continue

                # Check for bash shebang
                f = open(
                    _unicode_encode(x,
                                    encoding=_encodings['fs'],
                                    errors='strict'), 'rb')
                line = _unicode_decode(f.readline(),
                                       encoding=_encodings['content'],
                                       errors='replace')
                f.close()
                if line[:2] == '#!' and \
                 'bash' in line:
                    cmd = [BASH_BINARY, "-n", x]
                    if sys.hexversion < 0x3000000 or sys.hexversion >= 0x3020000:
                        # Python 3.1 does not support bytes in Popen args.
                        cmd = [
                            _unicode_encode(x,
                                            encoding=_encodings['fs'],
                                            errors='strict') for x in cmd
                        ]
                    proc = subprocess.Popen(cmd,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.STDOUT)
                    output = _unicode_decode(proc.communicate()[0],
                                             encoding=_encodings['fs'])
                    status = proc.wait()
                    self.assertEqual(os.WIFEXITED(status) and \
                     os.WEXITSTATUS(status) == os.EX_OK, True, msg=output)
Example #29
    def _update_thin_pkgdir(self, cat, pn, pkgdir):
        for pkgdir, pkgdir_dirs, pkgdir_files in os.walk(pkgdir):
            break
        cpvlist = []
        for f in pkgdir_files:
            try:
                f = _unicode_decode(f, encoding=_encodings["fs"], errors="strict")
            except UnicodeDecodeError:
                continue
            if f[:1] == ".":
                continue
            pf = self._is_cpv(cat, pn, f)
            if pf is not None:
                cpvlist.append(pf)
        return cpvlist
Example #30
	def __iter__(self):

		for root, dirs, files in os.walk(self.portdir):
			for file in files:
				try:
					file = _unicode_decode(file,
						encoding=_encodings['fs'], errors='strict')
				except UnicodeDecodeError:
					continue
				if file[-7:] == '.ebuild':
					cat = os.path.basename(os.path.dirname(root))
					pn_pv = file[:-7]
					path = os.path.join(root,file)
					if self.__has_cache(path):
						yield "%s/%s/%s" % (cat,os.path.basename(root),file[:-7])
Example #33
	def _getfiles():
		for path, dirs, files in os.walk(os.path.join(global_config_path, "sets")):
			# Iterate over a copy, since entries may be removed below.
			for d in list(dirs):
				if d in vcs_dirs or d.startswith(b".") or d.endswith(b"~"):
					dirs.remove(d)
			for f in files:
				if not f.startswith(b".") and not f.endswith(b"~"):
					yield os.path.join(path, f)

		dbapi = trees["porttree"].dbapi
		for repo in dbapi.getRepositories():
			path = dbapi.getRepositoryPath(repo)
			yield os.path.join(path, "sets.conf")

		yield os.path.join(settings["PORTAGE_CONFIGROOT"],
			USER_CONFIG_PATH, "sets.conf")
Example #34
def install_mask_dir(base_dir, install_mask, onerror=None):
    """
    Remove files and directories matched by INSTALL_MASK.

    @param base_dir: directory path corresponding to ${ED}
    @type base_dir: str
    @param install_mask: INSTALL_MASK configuration
    @type install_mask: InstallMask
    """
    onerror = onerror or _raise_exc
    base_dir = normalize_path(base_dir)
    base_dir_len = len(base_dir) + 1
    dir_stack = []

    # Remove masked files.
    for parent, dirs, files in os.walk(base_dir, onerror=onerror):
        try:
            parent = _unicode_decode(parent, errors='strict')
        except UnicodeDecodeError:
            continue
        dir_stack.append(parent)
        for fname in files:
            try:
                fname = _unicode_decode(fname, errors='strict')
            except UnicodeDecodeError:
                continue
            abs_path = os.path.join(parent, fname)
            relative_path = abs_path[base_dir_len:]
            if install_mask.match(relative_path):
                try:
                    os.unlink(abs_path)
                except OSError as e:
                    onerror(e)

    # Remove masked dirs (unless non-empty due to exclusions).
    while True:
        try:
            dir_path = dir_stack.pop()
        except IndexError:
            break

        if install_mask.match(dir_path[base_dir_len:] + '/'):
            try:
                os.rmdir(dir_path)
            except OSError:
                pass
	def _iter_modules(self, base_dir):
		for parent, dirs, files in os.walk(base_dir):
			parent = _unicode_decode(parent,
				encoding=_encodings['fs'], errors='strict')
			parent_mod = parent[len(PORTAGE_PYM_PATH)+1:]
			parent_mod = parent_mod.replace("/", ".")
			for x in files:
				x = _unicode_decode(x,
					encoding=_encodings['fs'], errors='strict')
				if x[-3:] != '.py':
					continue
				x = x[:-3]
				if x[-8:] == '__init__':
					x = parent_mod
				else:
					x = parent_mod + "." + x
				yield x
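
A worked sketch of the path-to-module mapping performed above (the paths and the PORTAGE_PYM_PATH value are hypothetical):

import os

PORTAGE_PYM_PATH = "/usr/lib/portage/pym"
path = PORTAGE_PYM_PATH + "/portage/util/digraph.py"

parent_mod = os.path.dirname(path)[len(PORTAGE_PYM_PATH) + 1:].replace("/", ".")
name = os.path.basename(path)[:-3]
mod = parent_mod if name == "__init__" else parent_mod + "." + name
# mod == "portage.util.digraph"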
Example #36
def install_mask_dir(base_dir, install_mask, onerror=None):
	"""
	Remove files and directories matched by INSTALL_MASK.

	@param base_dir: directory path corresponding to ${ED}
	@type base_dir: str
	@param install_mask: INSTALL_MASK configuration
	@type install_mask: InstallMask
	"""
	onerror = onerror or _raise_exc
	base_dir = normalize_path(base_dir)
	base_dir_len = len(base_dir) + 1
	dir_stack = []

	# Remove masked files.
	for parent, dirs, files in os.walk(base_dir, onerror=onerror):
		try:
			parent = _unicode_decode(parent, errors='strict')
		except UnicodeDecodeError:
			continue
		dir_stack.append(parent)
		for fname in files:
			try:
				fname = _unicode_decode(fname, errors='strict')
			except UnicodeDecodeError:
				continue
			abs_path = os.path.join(parent, fname)
			relative_path = abs_path[base_dir_len:]
			if install_mask.match(relative_path):
				try:
					os.unlink(abs_path)
				except OSError as e:
					onerror(e)

	# Remove masked dirs (unless non-empty due to exclusions).
	while True:
		try:
			dir_path = dir_stack.pop()
		except IndexError:
			break

		if install_mask.match(dir_path[base_dir_len:] + '/'):
			try:
				os.rmdir(dir_path)
			except OSError:
				pass
Example #37
def getTestDirs(base_path):
    TEST_FILE = b"__test__"
    testDirs = []

    # the os.walk help mentions relative paths as being quirky
    # I was tired of adding dirs to the list, so now we add __test__
    # to each dir we want tested.
    for root, dirs, files in os.walk(base_path):
        try:
            root = _unicode_decode(root, encoding=_encodings["fs"], errors="strict")
        except UnicodeDecodeError:
            continue

        if TEST_FILE in files:
            testDirs.append(root)

    testDirs.sort()
    return testDirs
Example #38
def getTestDirs(base_path):
	TEST_FILE = b'__test__.py'
	testDirs = []

	# the os.walk help mentions relative paths as being quirky
	# I was tired of adding dirs to the list, so now we add __test__.py
	# to each dir we want tested.
	for root, dirs, files in os.walk(base_path):
		try:
			root = _unicode_decode(root,
				encoding=_encodings['fs'], errors='strict')
		except UnicodeDecodeError:
			continue

		if TEST_FILE in files:
			testDirs.append(root)

	testDirs.sort()
	return testDirs
    def _iter_modules(self, base_dir):
        for parent, dirs, files in os.walk(base_dir):
            parent = _unicode_decode(parent,
                                     encoding=_encodings['fs'],
                                     errors='strict')
            parent_mod = parent[len(PORTAGE_PYM_PATH) + 1:]
            parent_mod = parent_mod.replace("/", ".")
            for x in files:
                x = _unicode_decode(x,
                                    encoding=_encodings['fs'],
                                    errors='strict')
                if x[-3:] != '.py':
                    continue
                x = x[:-3]
                if x[-8:] == '__init__':
                    x = parent_mod
                else:
                    x = parent_mod + "." + x
                yield x
Example #40
    def _update_thin_pkgdir(self, cat, pn, pkgdir):
        _, _, pkgdir_files = next(os.walk(pkgdir), (None, None, None))

        def _process_for_cpv(filename):
            try:
                filename = _unicode_decode(filename,
                                           encoding=_encodings["fs"],
                                           errors="strict")
            except UnicodeDecodeError:
                return None
            if filename.startswith("."):
                return None
            pf = self._is_cpv(cat, pn, filename)
            if pf is not None:
                return pf

        processed = (_process_for_cpv(filename) for filename in pkgdir_files)
        cpvlist = [pf for pf in processed if pf]
        return cpvlist
Example #41
	def testBashSyntax(self):
		locations = [PORTAGE_BIN_PATH]
		misc_dir = os.path.join(PORTAGE_BASE_PATH, "misc")
		if os.path.isdir(misc_dir):
			locations.append(misc_dir)
		for parent, dirs, files in \
			chain.from_iterable(os.walk(x) for x in locations):
			parent = _unicode_decode(parent,
				encoding=_encodings['fs'], errors='strict')
			for x in files:
				x = _unicode_decode(x,
					encoding=_encodings['fs'], errors='strict')
				ext = x.split('.')[-1]
				if ext in ('py', 'pyc', 'pyo'):
					continue
				x = os.path.join(parent, x)
				st = os.lstat(x)
				if not stat.S_ISREG(st.st_mode):
					continue

				# Check for bash shebang
				f = open(_unicode_encode(x,
					encoding=_encodings['fs'], errors='strict'), 'rb')
				line = _unicode_decode(f.readline(),
					encoding=_encodings['content'], errors='replace')
				f.close()
				if line[:2] == '#!' and \
					'bash' in line:
					cmd = [BASH_BINARY, "-n", x]
					if sys.hexversion < 0x3000000 or sys.hexversion >= 0x3020000:
						# Python 3.1 does not support bytes in Popen args.
						cmd = [_unicode_encode(x,
							encoding=_encodings['fs'], errors='strict') for x in cmd]
					proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
						stderr=subprocess.STDOUT)
					output = _unicode_decode(proc.communicate()[0],
						encoding=_encodings['fs'])
					status = proc.wait()
					self.assertEqual(os.WIFEXITED(status) and \
						os.WEXITSTATUS(status) == os.EX_OK, True, msg=output)
Example #42
def getTestDirs(base_path):
	TEST_FILE = b'__test__'
	svn_dirname = b'.svn'
	testDirs = []

	# the os.walk help mentions relative paths as being quirky
	# I was tired of adding dirs to the list, so now we add __test__
	# to each dir we want tested.
	for root, dirs, files in os.walk(base_path):
		if svn_dirname in dirs:
			dirs.remove(svn_dirname)
		try:
			root = _unicode_decode(root,
				encoding=_encodings['fs'], errors='strict')
		except UnicodeDecodeError:
			continue

		if TEST_FILE in files:
			testDirs.append(root)

	testDirs.sort()
	return testDirs
Example #43
    def create(self,
               checkExisting=False,
               assumeDistHashesSometimes=False,
               assumeDistHashesAlways=False,
               requiredDistfiles=[]):
        """ Recreate this Manifest from scratch.  This will not use any
        existing checksums unless assumeDistHashesSometimes or
        assumeDistHashesAlways is true (assumeDistHashesSometimes will only
        cause DIST checksums to be reused if the file doesn't exist in
        DISTDIR).  The requiredDistfiles parameter specifies a list of
        distfiles to raise a FileNotFound exception for (if no file or existing
        checksums are available), and defaults to all distfiles when not
        specified."""
        if checkExisting:
            self.checkAllHashes()
        if assumeDistHashesSometimes or assumeDistHashesAlways:
            distfilehashes = self.fhashdict["DIST"]
        else:
            distfilehashes = {}
        self.__init__(
            self.pkgdir,
            self.distdir,
            fetchlist_dict=self.fetchlist_dict,
            from_scratch=True,
            manifest1_compat=False)
        cpvlist = []
        pn = os.path.basename(self.pkgdir.rstrip(os.path.sep))
        cat = self._pkgdir_category()

        pkgdir = self.pkgdir

        for pkgdir, pkgdir_dirs, pkgdir_files in os.walk(pkgdir):
            break
        for f in pkgdir_files:
            try:
                f = _unicode_decode(
                    f, encoding=_encodings['fs'], errors='strict')
            except UnicodeDecodeError:
                continue
            if f[:1] == ".":
                continue
            pf = None
            if portage._glep_55_enabled:
                pf, eapi = portage._split_ebuild_name_glep55(f)
            elif f[-7:] == '.ebuild':
                pf = f[:-7]
            if pf is not None:
                mytype = "EBUILD"
                ps = portage.versions._pkgsplit(pf)
                cpv = "%s/%s" % (cat, pf)
                if not ps:
                    raise PortagePackageException(
                        _("Invalid package name: '%s'") % cpv)
                if ps[0] != pn:
                    raise PortagePackageException(
                        _("Package name does not "
                          "match directory name: '%s'") % cpv)
                cpvlist.append(cpv)
            elif manifest2MiscfileFilter(f):
                mytype = "MISC"
            else:
                continue
            self.fhashdict[mytype][f] = perform_multiple_checksums(
                self.pkgdir + f, self.hashes)
        recursive_files = []

        pkgdir = self.pkgdir
        cut_len = len(os.path.join(pkgdir, "files") + os.sep)
        for parentdir, dirs, files in os.walk(os.path.join(pkgdir, "files")):
            for f in files:
                try:
                    f = _unicode_decode(
                        f, encoding=_encodings['fs'], errors='strict')
                except UnicodeDecodeError:
                    continue
                full_path = os.path.join(parentdir, f)
                recursive_files.append(full_path[cut_len:])
        for f in recursive_files:
            if not manifest2AuxfileFilter(f):
                continue
            self.fhashdict["AUX"][f] = perform_multiple_checksums(
                os.path.join(self.pkgdir, "files", f.lstrip(os.sep)),
                self.hashes)
        distlist = set()
        for cpv in cpvlist:
            distlist.update(self._getCpvDistfiles(cpv))
        if requiredDistfiles is None:
            # This allows us to force removal of stale digests for the
            # ebuild --force digest option (no distfiles are required).
            requiredDistfiles = set()
        elif len(requiredDistfiles) == 0:
            # repoman passes in an empty list, which implies that all distfiles
            # are required.
            requiredDistfiles = distlist.copy()
        required_hash_types = set()
        required_hash_types.add("size")
        required_hash_types.add(portage.const.MANIFEST2_REQUIRED_HASH)
        for f in distlist:
            fname = os.path.join(self.distdir, f)
            mystat = None
            try:
                mystat = os.stat(fname)
            except OSError:
                pass
            if f in distfilehashes and \
             not required_hash_types.difference(distfilehashes[f]) and \
             ((assumeDistHashesSometimes and mystat is None) or \
             (assumeDistHashesAlways and mystat is None) or \
             (assumeDistHashesAlways and mystat is not None and \
             len(distfilehashes[f]) == len(self.hashes) and \
             distfilehashes[f]["size"] == mystat.st_size)):
                self.fhashdict["DIST"][f] = distfilehashes[f]
            else:
                try:
                    self.fhashdict["DIST"][f] = perform_multiple_checksums(
                        fname, self.hashes)
                except FileNotFound:
                    if f in requiredDistfiles:
                        raise
Example #44
def digestcheck(myfiles, mysettings, strict=False, justmanifest=None, mf=None):
	"""
	Verifies checksums. Assumes all files have been downloaded.
	@rtype: int
	@return: 1 on success and 0 on failure
	"""

	if justmanifest is not None:
		warnings.warn("The justmanifest parameter of the " + \
			"portage.package.ebuild.digestcheck.digestcheck()" + \
			" function is now unused.",
			DeprecationWarning, stacklevel=2)
		justmanifest = None

	if mysettings.get("EBUILD_SKIP_MANIFEST") == "1":
		return 1
	pkgdir = mysettings["O"]
	hash_filter = _hash_filter(mysettings.get("PORTAGE_CHECKSUM_FILTER", ""))
	if hash_filter.transparent:
		hash_filter = None
	if mf is None:
		mf = mysettings.repositories.get_repo_for_location(
			os.path.dirname(os.path.dirname(pkgdir)))
		mf = mf.load_manifest(pkgdir, mysettings["DISTDIR"])
	eout = EOutput()
	eout.quiet = mysettings.get("PORTAGE_QUIET", None) == "1"
	try:
		if not mf.thin and strict and "PORTAGE_PARALLEL_FETCHONLY" not in mysettings:
			if mf.fhashdict.get("EBUILD"):
				eout.ebegin(_("checking ebuild checksums ;-)"))
				mf.checkTypeHashes("EBUILD", hash_filter=hash_filter)
				eout.eend(0)
			if mf.fhashdict.get("AUX"):
				eout.ebegin(_("checking auxfile checksums ;-)"))
				mf.checkTypeHashes("AUX", hash_filter=hash_filter)
				eout.eend(0)
			if mf.fhashdict.get("MISC"):
				eout.ebegin(_("checking miscfile checksums ;-)"))
				mf.checkTypeHashes("MISC", ignoreMissingFiles=True,
					hash_filter=hash_filter)
				eout.eend(0)
		for f in myfiles:
			eout.ebegin(_("checking %s ;-)") % f)
			ftype = mf.findFile(f)
			if ftype is None:
				if mf.allow_missing:
					continue
				eout.eend(1)
				writemsg(_("\n!!! Missing digest for '%s'\n") % (f,),
					noiselevel=-1)
				return 0
			mf.checkFileHashes(ftype, f, hash_filter=hash_filter)
			eout.eend(0)
	except FileNotFound as e:
		eout.eend(1)
		writemsg(_("\n!!! A file listed in the Manifest could not be found: %s\n") % str(e),
			noiselevel=-1)
		return 0
	except DigestException as e:
		eout.eend(1)
		writemsg(_("\n!!! Digest verification failed:\n"), noiselevel=-1)
		writemsg("!!! %s\n" % e.value[0], noiselevel=-1)
		writemsg(_("!!! Reason: %s\n") % e.value[1], noiselevel=-1)
		writemsg(_("!!! Got: %s\n") % e.value[2], noiselevel=-1)
		writemsg(_("!!! Expected: %s\n") % e.value[3], noiselevel=-1)
		return 0
	if mf.thin or mf.allow_missing:
		# In this case we ignore any missing digests that
		# would otherwise be detected below.
		return 1
	# Make sure that all of the ebuilds are actually listed in the Manifest.
	for f in os.listdir(pkgdir):
		pf = None
		if f[-7:] == '.ebuild':
			pf = f[:-7]
		if pf is not None and not mf.hasFile("EBUILD", f):
			writemsg(_("!!! A file is not listed in the Manifest: '%s'\n") % \
				os.path.join(pkgdir, f), noiselevel=-1)
			if strict:
				return 0
	# epatch will just grab all the patches out of a directory, so we have to
	# make sure there aren't any foreign files that it might grab.
	filesdir = os.path.join(pkgdir, "files")

	for parent, dirs, files in os.walk(filesdir):
		try:
			parent = _unicode_decode(parent,
				encoding=_encodings['fs'], errors='strict')
		except UnicodeDecodeError:
			parent = _unicode_decode(parent,
				encoding=_encodings['fs'], errors='replace')
			writemsg(_("!!! Path contains invalid "
				"character(s) for encoding '%s': '%s'") \
				% (_encodings['fs'], parent), noiselevel=-1)
			if strict:
				return 0
			continue
		# Iterate over a copy, since entries may be removed below.
		for d in list(dirs):
			d_bytes = d
			try:
				d = _unicode_decode(d,
					encoding=_encodings['fs'], errors='strict')
			except UnicodeDecodeError:
				d = _unicode_decode(d,
					encoding=_encodings['fs'], errors='replace')
				writemsg(_("!!! Path contains invalid "
					"character(s) for encoding '%s': '%s'") \
					% (_encodings['fs'], os.path.join(parent, d)),
					noiselevel=-1)
				if strict:
					return 0
				dirs.remove(d_bytes)
				continue
			if d.startswith(".") or d == "CVS":
				dirs.remove(d_bytes)
		for f in files:
			try:
				f = _unicode_decode(f,
					encoding=_encodings['fs'], errors='strict')
			except UnicodeDecodeError:
				f = _unicode_decode(f,
					encoding=_encodings['fs'], errors='replace')
				if f.startswith("."):
					continue
				f = os.path.join(parent, f)[len(filesdir) + 1:]
				writemsg(_("!!! File name contains invalid "
					"character(s) for encoding '%s': '%s'") \
					% (_encodings['fs'], f), noiselevel=-1)
				if strict:
					return 0
				continue
			if f.startswith("."):
				continue
			f = os.path.join(parent, f)[len(filesdir) + 1:]
			file_type = mf.findFile(f)
			if file_type != "AUX" and not f.startswith("digest-"):
				writemsg(_("!!! A file is not listed in the Manifest: '%s'\n") % \
					os.path.join(filesdir, f), noiselevel=-1)
				if strict:
					return 0
	return 1
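
Before the next variant, a minimal driver sketch may help. The helper below is hypothetical; in Portage, digestcheck() is normally invoked by the fetch/ebuild machinery, which supplies a config object whose "O" and "DISTDIR" keys are already set.

# Hypothetical driver for digestcheck(); `mysettings` stands in for a
# portage.config instance with "O" (the package directory) and "DISTDIR" set.
from portage.package.ebuild.digestcheck import digestcheck

def verify_fetched(distfiles, mysettings):
    # digestcheck() returns 1 on success and 0 on failure; with strict=True,
    # files missing from the Manifest are fatal as well.
    if not digestcheck(distfiles, mysettings, strict=True):
        raise SystemExit("!!! Digest verification failed")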
Example #45
def digestcheck(myfiles, mysettings, strict=False, justmanifest=None, mf=None):
    """
    Verifies checksums. Assumes all files have been downloaded.
    @rtype: int
    @return: 1 on success and 0 on failure
    """

    if justmanifest is not None:
        warnings.warn(
            "The justmanifest parameter of the " +
            "portage.package.ebuild.digestcheck.digestcheck()" +
            " function is now unused.",
            DeprecationWarning,
            stacklevel=2,
        )
        justmanifest = None

    if mysettings.get("EBUILD_SKIP_MANIFEST") == "1":
        return 1
    pkgdir = mysettings["O"]
    hash_filter = _hash_filter(mysettings.get("PORTAGE_CHECKSUM_FILTER", ""))
    if hash_filter.transparent:
        hash_filter = None
    if mf is None:
        mf = mysettings.repositories.get_repo_for_location(
            os.path.dirname(os.path.dirname(pkgdir)))
        mf = mf.load_manifest(pkgdir, mysettings["DISTDIR"])
    eout = EOutput()
    eout.quiet = mysettings.get("PORTAGE_QUIET", None) == "1"
    try:
        if not mf.thin and strict and "PORTAGE_PARALLEL_FETCHONLY" not in mysettings:
            if mf.fhashdict.get("EBUILD"):
                eout.ebegin(_("checking ebuild checksums ;-)"))
                mf.checkTypeHashes("EBUILD", hash_filter=hash_filter)
                eout.eend(0)
            if mf.fhashdict.get("AUX"):
                eout.ebegin(_("checking auxfile checksums ;-)"))
                mf.checkTypeHashes("AUX", hash_filter=hash_filter)
                eout.eend(0)
            if mf.strict_misc_digests and mf.fhashdict.get("MISC"):
                eout.ebegin(_("checking miscfile checksums ;-)"))
                mf.checkTypeHashes("MISC",
                                   ignoreMissingFiles=True,
                                   hash_filter=hash_filter)
                eout.eend(0)
        for f in myfiles:
            eout.ebegin(_("checking %s ;-)") % f)
            ftype = mf.findFile(f)
            if ftype is None:
                if mf.allow_missing:
                    continue
                eout.eend(1)
                writemsg(_("\n!!! Missing digest for '%s'\n") % (f, ),
                         noiselevel=-1)
                return 0
            mf.checkFileHashes(ftype, f, hash_filter=hash_filter)
            eout.eend(0)
    except FileNotFound as e:
        eout.eend(1)
        writemsg(
            _("\n!!! A file listed in the Manifest could not be found: %s\n") %
            str(e),
            noiselevel=-1,
        )
        return 0
    except DigestException as e:
        eout.eend(1)
        writemsg(_("\n!!! Digest verification failed:\n"), noiselevel=-1)
        writemsg("!!! %s\n" % e.value[0], noiselevel=-1)
        writemsg(_("!!! Reason: %s\n") % e.value[1], noiselevel=-1)
        writemsg(_("!!! Got: %s\n") % e.value[2], noiselevel=-1)
        writemsg(_("!!! Expected: %s\n") % e.value[3], noiselevel=-1)
        return 0
    if mf.thin or mf.allow_missing:
        # In this case we ignore any missing digests that
        # would otherwise be detected below.
        return 1
    # Make sure that all of the ebuilds are actually listed in the Manifest.
    for f in os.listdir(pkgdir):
        pf = None
        if f[-7:] == ".ebuild":
            pf = f[:-7]
        if pf is not None and not mf.hasFile("EBUILD", f):
            writemsg(
                _("!!! A file is not listed in the Manifest: '%s'\n") %
                os.path.join(pkgdir, f),
                noiselevel=-1,
            )
            if strict:
                return 0
    # epatch will just grab all the patches out of a directory, so we have to
    # make sure there aren't any foreign files that it might grab.
    filesdir = os.path.join(pkgdir, "files")

    for parent, dirs, files in os.walk(filesdir):
        try:
            parent = _unicode_decode(parent,
                                     encoding=_encodings["fs"],
                                     errors="strict")
        except UnicodeDecodeError:
            parent = _unicode_decode(parent,
                                     encoding=_encodings["fs"],
                                     errors="replace")
            writemsg(
                _("!!! Path contains invalid "
                  "character(s) for encoding '%s': '%s'") %
                (_encodings["fs"], parent),
                noiselevel=-1,
            )
            if strict:
                return 0
            continue
        for d in dirs:
            d_bytes = d
            try:
                d = _unicode_decode(d,
                                    encoding=_encodings["fs"],
                                    errors="strict")
            except UnicodeDecodeError:
                d = _unicode_decode(d,
                                    encoding=_encodings["fs"],
                                    errors="replace")
                writemsg(
                    _("!!! Path contains invalid "
                      "character(s) for encoding '%s': '%s'") %
                    (_encodings["fs"], os.path.join(parent, d)),
                    noiselevel=-1,
                )
                if strict:
                    return 0
                dirs.remove(d_bytes)
                continue
            if d.startswith(".") or d == "CVS":
                dirs.remove(d_bytes)
        for f in files:
            try:
                f = _unicode_decode(f,
                                    encoding=_encodings["fs"],
                                    errors="strict")
            except UnicodeDecodeError:
                f = _unicode_decode(f,
                                    encoding=_encodings["fs"],
                                    errors="replace")
                if f.startswith("."):
                    continue
                f = os.path.join(parent, f)[len(filesdir) + 1:]
                writemsg(
                    _("!!! File name contains invalid "
                      "character(s) for encoding '%s': '%s'") %
                    (_encodings["fs"], f),
                    noiselevel=-1,
                )
                if strict:
                    return 0
                continue
            if f.startswith("."):
                continue
            f = os.path.join(parent, f)[len(filesdir) + 1:]
            file_type = mf.findFile(f)
            if file_type != "AUX" and not f.startswith("digest-"):
                writemsg(
                    _("!!! A file is not listed in the Manifest: '%s'\n") %
                    os.path.join(filesdir, f),
                    noiselevel=-1,
                )
                if strict:
                    return 0
    return 1
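
This variant differs from the previous one mainly in the mf.strict_misc_digests guard; both rely on _hash_filter to honor PORTAGE_CHECKSUM_FILTER. A small sketch of that helper, assuming the private _hash_filter class from portage.checksum behaves as the code above implies:

from portage.checksum import _hash_filter

hf = _hash_filter("-* sha512")       # reject every hash type except SHA512
print(hf("SHA512"))                  # True
print(hf("BLAKE2B"))                 # False
# An empty or "*" filter is transparent (accepts everything), which is why
# digestcheck() replaces a transparent filter with None.
print(_hash_filter("").transparent)  # True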
Example #46
def update_config_files(config_root, protect, protect_mask, update_iter, match_callback = None):
	"""Perform global updates on /etc/portage/package.*, /etc/portage/profile/package.*,
	/etc/portage/profile/packages and /etc/portage/sets.
	config_root - location of files to update
	protect - list of paths from CONFIG_PROTECT
	protect_mask - list of paths from CONFIG_PROTECT_MASK
	update_iter - list of update commands as returned from parse_updates(),
		or dict of {repo_name: list}
	match_callback - a callback which will be called with three arguments:
		match_callback(repo_name, old_atom, new_atom)
	and should return boolean value determining whether to perform the update"""

	repo_dict = None
	if isinstance(update_iter, dict):
		repo_dict = update_iter
	if match_callback is None:
		def match_callback(repo_name, atoma, atomb):
			return True
	config_root = normalize_path(config_root)
	update_files = {}
	file_contents = {}
	myxfiles = [
		"package.accept_keywords", "package.env",
		"package.keywords", "package.license",
		"package.mask", "package.properties",
		"package.unmask", "package.use", "sets"
	]
	myxfiles += [os.path.join("profile", x) for x in (
		"packages", "package.accept_keywords",
		"package.keywords", "package.mask",
		"package.unmask", "package.use",
		"package.use.force", "package.use.mask",
		"package.use.stable.force", "package.use.stable.mask"
	)]
	abs_user_config = os.path.join(config_root, USER_CONFIG_PATH)
	recursivefiles = []
	for x in myxfiles:
		config_file = os.path.join(abs_user_config, x)
		if os.path.isdir(config_file):
			for parent, dirs, files in os.walk(config_file):
				try:
					parent = _unicode_decode(parent,
						encoding=_encodings['fs'], errors='strict')
				except UnicodeDecodeError:
					continue
				for y_enc in list(dirs):
					try:
						y = _unicode_decode(y_enc,
							encoding=_encodings['fs'], errors='strict')
					except UnicodeDecodeError:
						dirs.remove(y_enc)
						continue
					if y.startswith(".") or y in VCS_DIRS:
						dirs.remove(y_enc)
				for y in files:
					try:
						y = _unicode_decode(y,
							encoding=_encodings['fs'], errors='strict')
					except UnicodeDecodeError:
						continue
					if y.startswith("."):
						continue
					recursivefiles.append(
						os.path.join(parent, y)[len(abs_user_config) + 1:])
		else:
			recursivefiles.append(x)
	myxfiles = recursivefiles
	for x in myxfiles:
		f = None
		try:
			f = io.open(
				_unicode_encode(os.path.join(abs_user_config, x),
				encoding=_encodings['fs'], errors='strict'),
				mode='r', encoding=_encodings['content'],
				errors='replace')
			file_contents[x] = f.readlines()
		except IOError:
			continue
		finally:
			if f is not None:
				f.close()

	ignore_line_re = re.compile(r'^#|^\s*$')
	if repo_dict is None:
		update_items = [(None, update_iter)]
	else:
		update_items = [x for x in repo_dict.items() if x[0] != 'DEFAULT']
	for repo_name, update_iter in update_items:
		for update_cmd in update_iter:
			for x, contents in file_contents.items():
				skip_next = False
				for pos, line in enumerate(contents):
					if skip_next:
						skip_next = False
						continue
					if ignore_line_re.match(line):
						continue
					atom = line.split()[0]
					if atom[:1] == "-":
						# package.mask supports incrementals
						atom = atom[1:]
					if atom[:1] == "*":
						# packages file supports "*"-prefixed atoms as indication of system packages.
						atom = atom[1:]
					if not isvalidatom(atom):
						continue
					new_atom = update_dbentry(update_cmd, atom)
					if atom != new_atom:
						if match_callback(repo_name, atom, new_atom):
							# add a comment with the update command, so
							# the user can clearly see what happened
							contents[pos] = "# %s\n" % \
								" ".join("%s" % (x,) for x in update_cmd)
							contents.insert(pos + 1,
								line.replace("%s" % (atom,),
								"%s" % (new_atom,), 1))
							# we've inserted an additional line, so we need to
							# skip it when it's reached in the next iteration
							skip_next = True
							update_files[x] = 1
							sys.stdout.write("p")
							sys.stdout.flush()

	protect_obj = ConfigProtect(
		config_root, protect, protect_mask)
	for x in update_files:
		updating_file = os.path.join(abs_user_config, x)
		if protect_obj.isprotected(updating_file):
			updating_file = new_protect_filename(updating_file)
		try:
			write_atomic(updating_file, "".join(file_contents[x]))
		except PortageException as e:
			writemsg("\n!!! %s\n" % str(e), noiselevel=-1)
			writemsg(_("!!! An error occurred while updating a config file:") + \
				" '%s'\n" % updating_file, noiselevel=-1)
			continue
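
A usage sketch for the match_callback hook follows. The "local" repository name is invented, and the commented-out call assumes protect, protect_mask and updates were obtained elsewhere (the latter from parse_updates()):

# Log each proposed atom rewrite; returning False for the hypothetical
# "local" repo leaves those config lines untouched.
def confirm(repo_name, old_atom, new_atom):
    print("%s: %s -> %s" % (repo_name, old_atom, new_atom))
    return repo_name != "local"

# update_config_files("/", protect, protect_mask,
#     {"gentoo": updates, "local": updates}, match_callback=confirm)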
Example #47
def update_config_files(config_root, protect, protect_mask, update_iter):
	"""Perform global updates on /etc/portage/package.*.
	config_root - location of files to update
	protect - list of paths from CONFIG_PROTECT
	protect_mask - list of paths from CONFIG_PROTECT_MASK
	update_iter - list of update commands as returned from parse_updates()"""

	config_root = normalize_path(config_root)
	update_files = {}
	file_contents = {}
	myxfiles = ["package.mask", "package.unmask", \
		"package.keywords", "package.use"]
	myxfiles += [os.path.join("profile", x) for x in myxfiles]
	abs_user_config = os.path.join(config_root, USER_CONFIG_PATH)
	recursivefiles = []
	for x in myxfiles:
		config_file = os.path.join(abs_user_config, x)
		if os.path.isdir(config_file):
			for parent, dirs, files in os.walk(config_file):
				try:
					parent = _unicode_decode(parent,
						encoding=_encodings['fs'], errors='strict')
				except UnicodeDecodeError:
					continue
				for y_enc in list(dirs):
					try:
						y = _unicode_decode(y_enc,
							encoding=_encodings['fs'], errors='strict')
					except UnicodeDecodeError:
						dirs.remove(y_enc)
						continue
					if y.startswith("."):
						dirs.remove(y_enc)
				for y in files:
					try:
						y = _unicode_decode(y,
							encoding=_encodings['fs'], errors='strict')
					except UnicodeDecodeError:
						continue
					if y.startswith("."):
						continue
					recursivefiles.append(
						os.path.join(parent, y)[len(abs_user_config) + 1:])
		else:
			recursivefiles.append(x)
	myxfiles = recursivefiles
	for x in myxfiles:
		try:
			file_contents[x] = codecs.open(
				_unicode_encode(os.path.join(abs_user_config, x),
				encoding=_encodings['fs'], errors='strict'),
				mode='r', encoding=_encodings['content'],
				errors='replace').readlines()
		except IOError:
			continue

	# update /etc/portage/package.*
	ignore_line_re = re.compile(r'^#|^\s*$')
	for update_cmd in update_iter:
		for x, contents in file_contents.items():
			for pos, line in enumerate(contents):
				if ignore_line_re.match(line):
					continue
				atom = line.split()[0]
				if atom.startswith("-"):
					# package.mask supports incrementals
					atom = atom[1:]
				if not isvalidatom(atom):
					continue
				new_atom = update_dbentry(update_cmd, atom)
				if atom != new_atom:
					contents[pos] = line.replace(atom, new_atom)
					update_files[x] = 1
					sys.stdout.write("p")
					sys.stdout.flush()

	protect_obj = ConfigProtect(
		config_root, protect, protect_mask)
	for x in update_files:
		updating_file = os.path.join(abs_user_config, x)
		if protect_obj.isprotected(updating_file):
			updating_file = new_protect_filename(updating_file)
		try:
			write_atomic(updating_file, "".join(file_contents[x]))
		except PortageException as e:
			writemsg("\n!!! %s\n" % str(e), noiselevel=-1)
			writemsg(_("!!! An error occured while updating a config file:") + \
				" '%s'\n" % updating_file, noiselevel=-1)
			continue
Example #48
def digestcheck(myfiles, mysettings, strict=False, justmanifest=None):
    """
	Verifies checksums. Assumes all files have been downloaded.
	@rtype: int
	@returns: 1 on success and 0 on failure
	"""

    if justmanifest is not None:
        warnings.warn(
            "The justmanifest parameter of the "
            "portage.package.ebuild.digestcheck.digestcheck()"
            " function is now unused.",
            DeprecationWarning,
            stacklevel=2,
        )
        justmanifest = None

    if mysettings.get("EBUILD_SKIP_MANIFEST") == "1":
        return 1
    pkgdir = mysettings["O"]
    manifest_path = os.path.join(pkgdir, "Manifest")
    if not os.path.exists(manifest_path):
        writemsg(_("!!! Manifest file not found: '%s'\n") % manifest_path,
                 noiselevel=-1)
        if strict:
            return 0
        else:
            return 1
    mf = Manifest(pkgdir, mysettings["DISTDIR"])
    manifest_empty = True
    for d in mf.fhashdict.values():
        if d:
            manifest_empty = False
            break
    if manifest_empty:
        writemsg(_("!!! Manifest is empty: '%s'\n") % manifest_path,
                 noiselevel=-1)
        if strict:
            return 0
        else:
            return 1
    eout = EOutput()
    eout.quiet = mysettings.get("PORTAGE_QUIET", None) == "1"
    try:
        if strict and "PORTAGE_PARALLEL_FETCHONLY" not in mysettings:
            eout.ebegin(_("checking ebuild checksums ;-)"))
            mf.checkTypeHashes("EBUILD")
            eout.eend(0)
            eout.ebegin(_("checking auxfile checksums ;-)"))
            mf.checkTypeHashes("AUX")
            eout.eend(0)
            eout.ebegin(_("checking miscfile checksums ;-)"))
            mf.checkTypeHashes("MISC", ignoreMissingFiles=True)
            eout.eend(0)
        for f in myfiles:
            eout.ebegin(_("checking %s ;-)") % f)
            ftype = mf.findFile(f)
            if ftype is None:
                raise KeyError(f)
            mf.checkFileHashes(ftype, f)
            eout.eend(0)
    except KeyError as e:
        eout.eend(1)
        writemsg(_("\n!!! Missing digest for %s\n") % str(e), noiselevel=-1)
        return 0
    except FileNotFound as e:
        eout.eend(1)
        writemsg(
            _("\n!!! A file listed in the Manifest could not be found: %s\n") %
            str(e),
            noiselevel=-1)
        return 0
    except DigestException as e:
        eout.eend(1)
        writemsg(_("\n!!! Digest verification failed:\n"), noiselevel=-1)
        writemsg("!!! %s\n" % e.value[0], noiselevel=-1)
        writemsg(_("!!! Reason: %s\n") % e.value[1], noiselevel=-1)
        writemsg(_("!!! Got: %s\n") % e.value[2], noiselevel=-1)
        writemsg(_("!!! Expected: %s\n") % e.value[3], noiselevel=-1)
        return 0
    # Make sure that all of the ebuilds are actually listed in the Manifest.
    for f in os.listdir(pkgdir):
        pf = None
        if f[-7:] == '.ebuild':
            pf = f[:-7]
        if pf is not None and not mf.hasFile("EBUILD", f):
            writemsg(_("!!! A file is not listed in the Manifest: '%s'\n") % \
             os.path.join(pkgdir, f), noiselevel=-1)
            if strict:
                return 0
    """ epatch will just grab all the patches out of a directory, so we have to
	make sure there aren't any foreign files that it might grab."""
    filesdir = os.path.join(pkgdir, "files")

    for parent, dirs, files in os.walk(filesdir):
        try:
            parent = _unicode_decode(parent,
                                     encoding=_encodings['fs'],
                                     errors='strict')
        except UnicodeDecodeError:
            parent = _unicode_decode(parent,
                                     encoding=_encodings['fs'],
                                     errors='replace')
            writemsg(_("!!! Path contains invalid "
             "character(s) for encoding '%s': '%s'") \
             % (_encodings['fs'], parent), noiselevel=-1)
            if strict:
                return 0
            continue
        for d in dirs:
            d_bytes = d
            try:
                d = _unicode_decode(d,
                                    encoding=_encodings['fs'],
                                    errors='strict')
            except UnicodeDecodeError:
                d = _unicode_decode(d,
                                    encoding=_encodings['fs'],
                                    errors='replace')
                writemsg(_("!!! Path contains invalid "
                 "character(s) for encoding '%s': '%s'") \
                 % (_encodings['fs'], os.path.join(parent, d)),
                 noiselevel=-1)
                if strict:
                    return 0
                dirs.remove(d_bytes)
                continue
            if d.startswith(".") or d == "CVS":
                dirs.remove(d_bytes)
        for f in files:
            try:
                f = _unicode_decode(f,
                                    encoding=_encodings['fs'],
                                    errors='strict')
            except UnicodeDecodeError:
                f = _unicode_decode(f,
                                    encoding=_encodings['fs'],
                                    errors='replace')
                if f.startswith("."):
                    continue
                f = os.path.join(parent, f)[len(filesdir) + 1:]
                writemsg(_("!!! File name contains invalid "
                 "character(s) for encoding '%s': '%s'") \
                 % (_encodings['fs'], f), noiselevel=-1)
                if strict:
                    return 0
                continue
            if f.startswith("."):
                continue
            f = os.path.join(parent, f)[len(filesdir) + 1:]
            file_type = mf.findFile(f)
            if file_type != "AUX" and not f.startswith("digest-"):
                writemsg(_("!!! A file is not listed in the Manifest: '%s'\n") % \
                 os.path.join(filesdir, f), noiselevel=-1)
                if strict:
                    return 0
    return 1
Example #49
def digestcheck(myfiles, mysettings, strict=0, justmanifest=0):
	"""
	Verifies checksums. Assumes all files have been downloaded.
	@rtype: int
	@return: 1 on success and 0 on failure
	"""
	if mysettings.get("EBUILD_SKIP_MANIFEST") == "1":
		return 1
	pkgdir = mysettings["O"]
	manifest_path = os.path.join(pkgdir, "Manifest")
	if not os.path.exists(manifest_path):
		writemsg(_("!!! Manifest file not found: '%s'\n") % manifest_path,
			noiselevel=-1)
		if strict:
			return 0
		else:
			return 1
	mf = Manifest(pkgdir, mysettings["DISTDIR"])
	manifest_empty = True
	for d in mf.fhashdict.values():
		if d:
			manifest_empty = False
			break
	if manifest_empty:
		writemsg(_("!!! Manifest is empty: '%s'\n") % manifest_path,
			noiselevel=-1)
		if strict:
			return 0
		else:
			return 1
	eout = EOutput()
	eout.quiet = mysettings.get("PORTAGE_QUIET", None) == "1"
	try:
		if strict and "PORTAGE_PARALLEL_FETCHONLY" not in mysettings:
			eout.ebegin(_("checking ebuild checksums ;-)"))
			mf.checkTypeHashes("EBUILD")
			eout.eend(0)
			eout.ebegin(_("checking auxfile checksums ;-)"))
			mf.checkTypeHashes("AUX")
			eout.eend(0)
			eout.ebegin(_("checking miscfile checksums ;-)"))
			mf.checkTypeHashes("MISC", ignoreMissingFiles=True)
			eout.eend(0)
		for f in myfiles:
			eout.ebegin(_("checking %s ;-)") % f)
			ftype = mf.findFile(f)
			if ftype is None:
				raise KeyError(f)
			mf.checkFileHashes(ftype, f)
			eout.eend(0)
	except KeyError as e:
		eout.eend(1)
		writemsg(_("\n!!! Missing digest for %s\n") % str(e), noiselevel=-1)
		return 0
	except FileNotFound as e:
		eout.eend(1)
		writemsg(_("\n!!! A file listed in the Manifest could not be found: %s\n") % str(e),
			noiselevel=-1)
		return 0
	except DigestException as e:
		eout.eend(1)
		writemsg(_("\n!!! Digest verification failed:\n"), noiselevel=-1)
		writemsg("!!! %s\n" % e.value[0], noiselevel=-1)
		writemsg(_("!!! Reason: %s\n") % e.value[1], noiselevel=-1)
		writemsg(_("!!! Got: %s\n") % e.value[2], noiselevel=-1)
		writemsg(_("!!! Expected: %s\n") % e.value[3], noiselevel=-1)
		return 0
	# Make sure that all of the ebuilds are actually listed in the Manifest.
	glep55 = 'parse-eapi-glep-55' in mysettings.features
	for f in os.listdir(pkgdir):
		pf = None
		if glep55:
			pf, eapi = _split_ebuild_name_glep55(f)
		elif f[-7:] == '.ebuild':
			pf = f[:-7]
		if pf is not None and not mf.hasFile("EBUILD", f):
			writemsg(_("!!! A file is not listed in the Manifest: '%s'\n") % \
				os.path.join(pkgdir, f), noiselevel=-1)
			if strict:
				return 0
	""" epatch will just grab all the patches out of a directory, so we have to
	make sure there aren't any foreign files that it might grab."""
	filesdir = os.path.join(pkgdir, "files")

	for parent, dirs, files in os.walk(filesdir):
		try:
			parent = _unicode_decode(parent,
				encoding=_encodings['fs'], errors='strict')
		except UnicodeDecodeError:
			parent = _unicode_decode(parent,
				encoding=_encodings['fs'], errors='replace')
			writemsg(_("!!! Path contains invalid "
				"character(s) for encoding '%s': '%s'") \
				% (_encodings['fs'], parent), noiselevel=-1)
			if strict:
				return 0
			continue
		for d in dirs:
			d_bytes = d
			try:
				d = _unicode_decode(d,
					encoding=_encodings['fs'], errors='strict')
			except UnicodeDecodeError:
				d = _unicode_decode(d,
					encoding=_encodings['fs'], errors='replace')
				writemsg(_("!!! Path contains invalid "
					"character(s) for encoding '%s': '%s'") \
					% (_encodings['fs'], os.path.join(parent, d)),
					noiselevel=-1)
				if strict:
					return 0
				dirs.remove(d_bytes)
				continue
			if d.startswith(".") or d == "CVS":
				dirs.remove(d_bytes)
		for f in files:
			try:
				f = _unicode_decode(f,
					encoding=_encodings['fs'], errors='strict')
			except UnicodeDecodeError:
				f = _unicode_decode(f,
					encoding=_encodings['fs'], errors='replace')
				if f.startswith("."):
					continue
				f = os.path.join(parent, f)[len(filesdir) + 1:]
				writemsg(_("!!! File name contains invalid "
					"character(s) for encoding '%s': '%s'") \
					% (_encodings['fs'], f), noiselevel=-1)
				if strict:
					return 0
				continue
			if f.startswith("."):
				continue
			f = os.path.join(parent, f)[len(filesdir) + 1:]
			file_type = mf.findFile(f)
			if file_type != "AUX" and not f.startswith("digest-"):
				writemsg(_("!!! A file is not listed in the Manifest: '%s'\n") % \
					os.path.join(filesdir, f), noiselevel=-1)
				if strict:
					return 0
	return 1
Example #50
    def _apply_max_mtime(self, preserved_stats, entries):
        """
        Set the Manifest mtime to the max mtime of all relevant files
        and directories. Directory mtimes account for file renames and
        removals. The existing Manifest mtime accounts for eclass
        modifications that change DIST entries. This results in a
        stable/predictable mtime, which is useful when converting thin
        manifests to thick manifests for distribution via rsync. For
        portability, the mtime is set with 1 second resolution.

        @param preserved_stats: maps paths to preserved stat results
                that should be used instead of os.stat() calls
        @type preserved_stats: dict
        @param entries: list of current Manifest2Entry instances
        @type entries: list
        """

        # Use stat_result[stat.ST_MTIME] for 1 second resolution, since
        # it always rounds down. Note that stat_result.st_mtime will round
        # up from 0.999999999 to 1.0 when precision is lost during conversion
        # from nanosecond resolution to float.

        def _update_max(max_mtime, st):
            stat_mtime = st[stat.ST_MTIME]
            if max_mtime:
                return max(max_mtime, stat_mtime)
            return stat_mtime

        def _stat(path):
            if path in preserved_stats:
                return preserved_stats[path]
            else:
                return os.stat(path)

        max_mtime = None
        for stat_result in preserved_stats.values():
            max_mtime = _update_max(max_mtime, stat_result)

        for entry in entries:
            if entry.type == "DIST":
                continue
            files = ""
            if entry.type == "AUX":
                files = "files"
            abs_path = os.path.join(self.pkgdir, files, entry.name)
            max_mtime = _update_max(max_mtime, _stat(abs_path))

        if not self.thin:
            # Account for changes to all relevant nested directories.
            # This is not necessary for thin manifests because
            # self.pkgdir is already included via preserved_stats.
            for parent_dir, dirs, files in os.walk(self.pkgdir.rstrip(os.sep)):
                try:
                    parent_dir = _unicode_decode(parent_dir,
                                                 encoding=_encodings["fs"],
                                                 errors="strict")
                except UnicodeDecodeError:
                    # If an absolute path cannot be decoded, then it is
                    # always excluded from the manifest (repoman will
                    # report such problems).
                    pass
                else:
                    max_mtime = _update_max(max_mtime, _stat(parent_dir))

        if max_mtime is not None:
            for path in preserved_stats:
                try:
                    os.utime(path, (max_mtime, max_mtime))
                except OSError as e:
                    # Even though we have write permission, utime fails
                    # with EPERM if path is owned by a different user.
                    # Only warn in this case, since it's not a problem
                    # unless this repo is being prepared for distribution
                    # via rsync.
                    writemsg_level(
                        f"!!! utime('{path}', ({max_mtime}, {max_mtime})): {e}\n",
                        level=logging.WARNING,
                        noiselevel=-1,
                    )
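
The rounding caveat in the comment above is easy to demonstrate outside Portage. This standalone snippet assumes Python 3.3+ (for the ns keyword of os.utime) and a filesystem with nanosecond timestamps, such as ext4:

import os
import stat
import tempfile

# For realistic epoch times, the float st_mtime can round 0.999999999 up to
# the next whole second, while st[stat.ST_MTIME] truncates to an integer.
ns = 1234567890999999999  # 1234567890.999999999 seconds
with tempfile.NamedTemporaryFile() as tmp:
    os.utime(tmp.name, ns=(ns, ns))
    st = os.stat(tmp.name)
    print(st.st_mtime)        # 1234567891.0 (precision lost, rounded up)
    print(st[stat.ST_MTIME])  # 1234567890 (always rounds down)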
Example #51
    def create(self,
               checkExisting=False,
               assumeDistHashesSometimes=False,
               assumeDistHashesAlways=False,
               requiredDistfiles=[]):
        """ Recreate this Manifest from scratch.  This will not use any
		existing checksums unless assumeDistHashesSometimes or
		assumeDistHashesAlways is true (assumeDistHashesSometimes will only
		cause DIST checksums to be reused if the file doesn't exist in
		DISTDIR).  The requiredDistfiles parameter specifies a list of
		distfiles to raise a FileNotFound exception for (if no file or existing
		checksums are available), and defaults to all distfiles when not
		specified."""
        if checkExisting:
            self.checkAllHashes()
        if assumeDistHashesSometimes or assumeDistHashesAlways:
            distfilehashes = self.fhashdict["DIST"]
        else:
            distfilehashes = {}
        self.__init__(self.pkgdir,
                      self.distdir,
                      fetchlist_dict=self.fetchlist_dict,
                      from_scratch=True,
                      manifest1_compat=False)
        cpvlist = []
        pn = os.path.basename(self.pkgdir.rstrip(os.path.sep))
        cat = self._pkgdir_category()

        pkgdir = self.pkgdir

        for pkgdir, pkgdir_dirs, pkgdir_files in os.walk(pkgdir):
            break
        for f in pkgdir_files:
            try:
                f = _unicode_decode(f,
                                    encoding=_encodings['fs'],
                                    errors='strict')
            except UnicodeDecodeError:
                continue
            if f[:1] == ".":
                continue
            pf = None
            if f[-7:] == '.ebuild':
                pf = f[:-7]
            if pf is not None:
                mytype = "EBUILD"
                ps = portage.versions._pkgsplit(pf)
                cpv = "%s/%s" % (cat, pf)
                if not ps:
                    raise PortagePackageException(
                        _("Invalid package name: '%s'") % cpv)
                if ps[0] != pn:
                    raise PortagePackageException(
                        _("Package name does not "
                          "match directory name: '%s'") % cpv)
                cpvlist.append(cpv)
            elif manifest2MiscfileFilter(f):
                mytype = "MISC"
            else:
                continue
            self.fhashdict[mytype][f] = perform_multiple_checksums(
                self.pkgdir + f, self.hashes)
        recursive_files = []

        pkgdir = self.pkgdir
        cut_len = len(os.path.join(pkgdir, "files") + os.sep)
        for parentdir, dirs, files in os.walk(os.path.join(pkgdir, "files")):
            for f in files:
                try:
                    f = _unicode_decode(f,
                                        encoding=_encodings['fs'],
                                        errors='strict')
                except UnicodeDecodeError:
                    continue
                full_path = os.path.join(parentdir, f)
                recursive_files.append(full_path[cut_len:])
        for f in recursive_files:
            if not manifest2AuxfileFilter(f):
                continue
            self.fhashdict["AUX"][f] = perform_multiple_checksums(
                os.path.join(self.pkgdir, "files", f.lstrip(os.sep)),
                self.hashes)
        distlist = set()
        for cpv in cpvlist:
            distlist.update(self._getCpvDistfiles(cpv))
        if requiredDistfiles is None:
            # This allows us to force removal of stale digests for the
            # ebuild --force digest option (no distfiles are required).
            requiredDistfiles = set()
        elif len(requiredDistfiles) == 0:
            # repoman passes in an empty list, which implies that all distfiles
            # are required.
            requiredDistfiles = distlist.copy()
        required_hash_types = set()
        required_hash_types.add("size")
        required_hash_types.add(portage.const.MANIFEST2_REQUIRED_HASH)
        for f in distlist:
            fname = os.path.join(self.distdir, f)
            mystat = None
            try:
                mystat = os.stat(fname)
            except OSError:
                pass
            if f in distfilehashes and \
             not required_hash_types.difference(distfilehashes[f]) and \
             ((assumeDistHashesSometimes and mystat is None) or \
             (assumeDistHashesAlways and mystat is None) or \
             (assumeDistHashesAlways and mystat is not None and \
             len(distfilehashes[f]) == len(self.hashes) and \
             distfilehashes[f]["size"] == mystat.st_size)):
                self.fhashdict["DIST"][f] = distfilehashes[f]
            else:
                try:
                    self.fhashdict["DIST"][f] = perform_multiple_checksums(
                        fname, self.hashes)
                except FileNotFound:
                    if f in requiredDistfiles:
                        raise
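
A hedged call sequence for create(): the paths below are placeholders, and fetchlist_dict (normally derived from each ebuild's SRC_URI via the package database) is stubbed so that _getCpvDistfiles() can resolve DIST entries:

from portage.manifest import Manifest

# Rebuild the Manifest for one package directory, reusing existing DIST
# hashes only for distfiles that are absent from DISTDIR.
mf = Manifest("/var/db/repos/gentoo/app-misc/foo", "/var/cache/distfiles",
    fetchlist_dict={"app-misc/foo-1.0": ["foo-1.0.tar.gz"]})
mf.create(assumeDistHashesSometimes=True)
mf.write()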
Example #52
	def _populate_local(self):
		self.dbapi.clear()
		_instance_key = self.dbapi._instance_key
		# In order to minimize disk I/O, we never compute digests here.
		# Therefore we exclude hashes from the minimum_keys, so that
		# the Packages file will not be needlessly re-written due to
		# missing digests.
		minimum_keys = self._pkgindex_keys.difference(self._pkgindex_hashes)
		if True:
			pkg_paths = {}
			self._pkg_paths = pkg_paths
			dir_files = {}
			for parent, dir_names, file_names in os.walk(self.pkgdir):
				relative_parent = parent[len(self.pkgdir)+1:]
				dir_files[relative_parent] = file_names

			pkgindex = self._load_pkgindex()
			if not self._pkgindex_version_supported(pkgindex):
				pkgindex = self._new_pkgindex()
			metadata = {}
			basename_index = {}
			for d in pkgindex.packages:
				cpv = _pkg_str(d["CPV"], metadata=d,
					settings=self.settings)
				d["CPV"] = cpv
				metadata[_instance_key(cpv)] = d
				path = d.get("PATH")
				if not path:
					path = cpv + ".tbz2"
				basename = os.path.basename(path)
				basename_index.setdefault(basename, []).append(d)

			update_pkgindex = False
			for mydir, file_names in dir_files.items():
				try:
					mydir = _unicode_decode(mydir,
						encoding=_encodings["fs"], errors="strict")
				except UnicodeDecodeError:
					continue
				for myfile in file_names:
					try:
						myfile = _unicode_decode(myfile,
							encoding=_encodings["fs"], errors="strict")
					except UnicodeDecodeError:
						continue
					if not myfile.endswith(SUPPORTED_XPAK_EXTENSIONS):
						continue
					mypath = os.path.join(mydir, myfile)
					full_path = os.path.join(self.pkgdir, mypath)
					s = os.lstat(full_path)

					if not stat.S_ISREG(s.st_mode):
						continue

					# Validate data from the package index and try to avoid
					# reading the xpak if possible.
					possibilities = basename_index.get(myfile)
					if possibilities:
						match = None
						for d in possibilities:
							try:
								if long(d["_mtime_"]) != s[stat.ST_MTIME]:
									continue
							except (KeyError, ValueError):
								continue
							try:
								if long(d["SIZE"]) != long(s.st_size):
									continue
							except (KeyError, ValueError):
								continue
							if not minimum_keys.difference(d):
								match = d
								break
						if match:
							mycpv = match["CPV"]
							instance_key = _instance_key(mycpv)
							pkg_paths[instance_key] = mypath
							# update the path if the package has been moved
							oldpath = d.get("PATH")
							if oldpath and oldpath != mypath:
								update_pkgindex = True
							# Omit PATH if it is the default path for
							# the current Packages format version.
							if mypath != mycpv + ".tbz2":
								d["PATH"] = mypath
								if not oldpath:
									update_pkgindex = True
							else:
								d.pop("PATH", None)
								if oldpath:
									update_pkgindex = True
							self.dbapi.cpv_inject(mycpv)
							continue
					if not os.access(full_path, os.R_OK):
						writemsg(_("!!! Permission denied to read " \
							"binary package: '%s'\n") % full_path,
							noiselevel=-1)
						self.invalids.append(myfile[:-5])
						continue
					pkg_metadata = self._read_metadata(full_path, s,
						keys=chain(self.dbapi._aux_cache_keys,
						("PF", "CATEGORY")))
					mycat = pkg_metadata.get("CATEGORY", "")
					mypf = pkg_metadata.get("PF", "")
					slot = pkg_metadata.get("SLOT", "")
					mypkg = myfile[:-5]
					if not mycat or not mypf or not slot:
						#old-style or corrupt package
						writemsg(_("\n!!! Invalid binary package: '%s'\n") % full_path,
							noiselevel=-1)
						missing_keys = []
						if not mycat:
							missing_keys.append("CATEGORY")
						if not mypf:
							missing_keys.append("PF")
						if not slot:
							missing_keys.append("SLOT")
						msg = []
						if missing_keys:
							missing_keys.sort()
							msg.append(_("Missing metadata key(s): %s.") % \
								", ".join(missing_keys))
						msg.append(_(" This binary package is not " \
							"recoverable and should be deleted."))
						for line in textwrap.wrap("".join(msg), 72):
							writemsg("!!! %s\n" % line, noiselevel=-1)
						self.invalids.append(mypkg)
						continue

					multi_instance = False
					invalid_name = False
					build_id = None
					if myfile.endswith(".xpak"):
						multi_instance = True
						build_id = self._parse_build_id(myfile)
						if build_id < 1:
							invalid_name = True
						elif myfile != "%s-%s.xpak" % (
							mypf, build_id):
							invalid_name = True
						else:
							mypkg = mypkg[:-len(str(build_id))-1]
					elif myfile != mypf + ".tbz2":
						invalid_name = True

					if invalid_name:
						writemsg(_("\n!!! Binary package name is "
							"invalid: '%s'\n") % full_path,
							noiselevel=-1)
						continue

					if pkg_metadata.get("BUILD_ID"):
						try:
							build_id = long(pkg_metadata["BUILD_ID"])
						except ValueError:
							writemsg(_("!!! Binary package has "
								"invalid BUILD_ID: '%s'\n") %
								full_path, noiselevel=-1)
							continue
					else:
						build_id = None

					if multi_instance:
						name_split = catpkgsplit("%s/%s" %
							(mycat, mypf))
						if (name_split is None or
							tuple(catsplit(mydir)) != name_split[:2]):
							continue
					elif mycat != mydir and mydir != "All":
						continue
					if mypkg != mypf.strip():
						continue
					mycpv = mycat + "/" + mypkg
					if not self.dbapi._category_re.match(mycat):
						writemsg(_("!!! Binary package has an " \
							"unrecognized category: '%s'\n") % full_path,
							noiselevel=-1)
						writemsg(_("!!! '%s' has a category that is not" \
							" listed in %setc/portage/categories\n") % \
							(mycpv, self.settings["PORTAGE_CONFIGROOT"]),
							noiselevel=-1)
						continue
					if build_id is not None:
						pkg_metadata["BUILD_ID"] = _unicode(build_id)
					pkg_metadata["SIZE"] = _unicode(s.st_size)
					# Discard items used only for validation above.
					pkg_metadata.pop("CATEGORY")
					pkg_metadata.pop("PF")
					mycpv = _pkg_str(mycpv,
						metadata=self.dbapi._aux_cache_slot_dict(
						pkg_metadata))
					pkg_paths[_instance_key(mycpv)] = mypath
					self.dbapi.cpv_inject(mycpv)
					update_pkgindex = True
					d = metadata.get(_instance_key(mycpv),
						pkgindex._pkg_slot_dict())
					if d:
						try:
							if long(d["_mtime_"]) != s[stat.ST_MTIME]:
								d.clear()
						except (KeyError, ValueError):
							d.clear()
					if d:
						try:
							if long(d["SIZE"]) != long(s.st_size):
								d.clear()
						except (KeyError, ValueError):
							d.clear()

					for k in self._pkgindex_allowed_pkg_keys:
						v = pkg_metadata.get(k)
						if v:
							d[k] = v
					d["CPV"] = mycpv

					try:
						self._eval_use_flags(mycpv, d)
					except portage.exception.InvalidDependString:
						writemsg(_("!!! Invalid binary package: '%s'\n") % \
							self.getname(mycpv), noiselevel=-1)
						self.dbapi.cpv_remove(mycpv)
						del pkg_paths[_instance_key(mycpv)]

					# record location if it's non-default
					if mypath != mycpv + ".tbz2":
						d["PATH"] = mypath
					else:
						d.pop("PATH", None)
					metadata[_instance_key(mycpv)] = d

			for instance_key in list(metadata):
				if instance_key not in pkg_paths:
					del metadata[instance_key]

			if update_pkgindex:
				del pkgindex.packages[:]
				pkgindex.packages.extend(iter(metadata.values()))
				self._update_pkgindex_header(pkgindex.header)

		return pkgindex if update_pkgindex else None
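
The pkgindex objects above are Portage's parser for the Packages file. A standalone sketch of reading such an index directly, assuming the PackageIndex class from portage.getbinpkg and a conventional binpkg cache path:

import io
from portage.getbinpkg import PackageIndex

idx = PackageIndex()
with io.open("/var/cache/binpkgs/Packages",
        encoding="utf_8", errors="replace") as f:
    idx.read(f)  # reads the header block, then one stanza per package
print(idx.header.get("TIMESTAMP"))
for d in idx.packages[:3]:
    print(d.get("CPV"), d.get("PATH") or d["CPV"] + ".tbz2")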
Example #54
	def _populate(self, getbinpkgs=0):
		if (not os.path.isdir(self.pkgdir) and not getbinpkgs):
			return 0

		# Clear all caches in case populate is called multiple times
		# as may be the case when _global_updates calls populate()
		# prior to performing package moves since it only wants to
		# operate on local packages (getbinpkgs=0).
		self._remotepkgs = None
		self.dbapi.clear()
		_instance_key = self.dbapi._instance_key
		if True:
			pkg_paths = {}
			self._pkg_paths = pkg_paths
			dir_files = {}
			for parent, dir_names, file_names in os.walk(self.pkgdir):
				relative_parent = parent[len(self.pkgdir)+1:]
				dir_files[relative_parent] = file_names

			pkgindex = self._load_pkgindex()
			if not self._pkgindex_version_supported(pkgindex):
				pkgindex = self._new_pkgindex()
			header = pkgindex.header
			metadata = {}
			basename_index = {}
			for d in pkgindex.packages:
				cpv = _pkg_str(d["CPV"], metadata=d,
					settings=self.settings)
				d["CPV"] = cpv
				metadata[_instance_key(cpv)] = d
				path = d.get("PATH")
				if not path:
					path = cpv + ".tbz2"
				basename = os.path.basename(path)
				basename_index.setdefault(basename, []).append(d)

			update_pkgindex = False
			for mydir, file_names in dir_files.items():
				try:
					mydir = _unicode_decode(mydir,
						encoding=_encodings["fs"], errors="strict")
				except UnicodeDecodeError:
					continue
				for myfile in file_names:
					try:
						myfile = _unicode_decode(myfile,
							encoding=_encodings["fs"], errors="strict")
					except UnicodeDecodeError:
						continue
					if not myfile.endswith(SUPPORTED_XPAK_EXTENSIONS):
						continue
					mypath = os.path.join(mydir, myfile)
					full_path = os.path.join(self.pkgdir, mypath)
					s = os.lstat(full_path)

					if not stat.S_ISREG(s.st_mode):
						continue

					# Validate data from the package index and try to avoid
					# reading the xpak if possible.
					possibilities = basename_index.get(myfile)
					if possibilities:
						match = None
						for d in possibilities:
							try:
								if long(d["_mtime_"]) != s[stat.ST_MTIME]:
									continue
							except (KeyError, ValueError):
								continue
							try:
								if long(d["SIZE"]) != long(s.st_size):
									continue
							except (KeyError, ValueError):
								continue
							if not self._pkgindex_keys.difference(d):
								match = d
								break
						if match:
							mycpv = match["CPV"]
							instance_key = _instance_key(mycpv)
							pkg_paths[instance_key] = mypath
							# update the path if the package has been moved
							oldpath = d.get("PATH")
							if oldpath and oldpath != mypath:
								update_pkgindex = True
							# Omit PATH if it is the default path for
							# the current Packages format version.
							if mypath != mycpv + ".tbz2":
								d["PATH"] = mypath
								if not oldpath:
									update_pkgindex = True
							else:
								d.pop("PATH", None)
								if oldpath:
									update_pkgindex = True
							self.dbapi.cpv_inject(mycpv)
							continue
					if not os.access(full_path, os.R_OK):
						writemsg(_("!!! Permission denied to read " \
							"binary package: '%s'\n") % full_path,
							noiselevel=-1)
						self.invalids.append(myfile[:-5])
						continue
					pkg_metadata = self._read_metadata(full_path, s,
						keys=chain(self.dbapi._aux_cache_keys,
						("PF", "CATEGORY")))
					mycat = pkg_metadata.get("CATEGORY", "")
					mypf = pkg_metadata.get("PF", "")
					slot = pkg_metadata.get("SLOT", "")
					mypkg = myfile[:-5]
					if not mycat or not mypf or not slot:
						#old-style or corrupt package
						writemsg(_("\n!!! Invalid binary package: '%s'\n") % full_path,
							noiselevel=-1)
						missing_keys = []
						if not mycat:
							missing_keys.append("CATEGORY")
						if not mypf:
							missing_keys.append("PF")
						if not slot:
							missing_keys.append("SLOT")
						msg = []
						if missing_keys:
							missing_keys.sort()
							msg.append(_("Missing metadata key(s): %s.") % \
								", ".join(missing_keys))
						msg.append(_(" This binary package is not " \
							"recoverable and should be deleted."))
						for line in textwrap.wrap("".join(msg), 72):
							writemsg("!!! %s\n" % line, noiselevel=-1)
						self.invalids.append(mypkg)
						continue

					multi_instance = False
					invalid_name = False
					build_id = None
					if myfile.endswith(".xpak"):
						multi_instance = True
						build_id = self._parse_build_id(myfile)
						if build_id < 1:
							invalid_name = True
						elif myfile != "%s-%s.xpak" % (
							mypf, build_id):
							invalid_name = True
						else:
							mypkg = mypkg[:-len(str(build_id))-1]
					elif myfile != mypf + ".tbz2":
						invalid_name = True

					if invalid_name:
						writemsg(_("\n!!! Binary package name is "
							"invalid: '%s'\n") % full_path,
							noiselevel=-1)
						continue

					if pkg_metadata.get("BUILD_ID"):
						try:
							build_id = long(pkg_metadata["BUILD_ID"])
						except ValueError:
							writemsg(_("!!! Binary package has "
								"invalid BUILD_ID: '%s'\n") %
								full_path, noiselevel=-1)
							continue
					else:
						build_id = None

					if multi_instance:
						name_split = catpkgsplit("%s/%s" %
							(mycat, mypf))
						if (name_split is None or
							tuple(catsplit(mydir)) != name_split[:2]):
							continue
					elif mycat != mydir and mydir != "All":
						continue
					if mypkg != mypf.strip():
						continue
					mycpv = mycat + "/" + mypkg
					if not self.dbapi._category_re.match(mycat):
						writemsg(_("!!! Binary package has an " \
							"unrecognized category: '%s'\n") % full_path,
							noiselevel=-1)
						writemsg(_("!!! '%s' has a category that is not" \
							" listed in %setc/portage/categories\n") % \
							(mycpv, self.settings["PORTAGE_CONFIGROOT"]),
							noiselevel=-1)
						continue
					if build_id is not None:
						pkg_metadata["BUILD_ID"] = _unicode(build_id)
					pkg_metadata["SIZE"] = _unicode(s.st_size)
					# Discard items used only for validation above.
					pkg_metadata.pop("CATEGORY")
					pkg_metadata.pop("PF")
					mycpv = _pkg_str(mycpv,
						metadata=self.dbapi._aux_cache_slot_dict(
						pkg_metadata))
					pkg_paths[_instance_key(mycpv)] = mypath
					self.dbapi.cpv_inject(mycpv)
					update_pkgindex = True
					d = metadata.get(_instance_key(mycpv),
						pkgindex._pkg_slot_dict())
					if d:
						try:
							if long(d["_mtime_"]) != s[stat.ST_MTIME]:
								d.clear()
						except (KeyError, ValueError):
							d.clear()
					if d:
						try:
							if long(d["SIZE"]) != long(s.st_size):
								d.clear()
						except (KeyError, ValueError):
							d.clear()

					for k in self._pkgindex_allowed_pkg_keys:
						v = pkg_metadata.get(k)
						if v is not None:
							d[k] = v
					d["CPV"] = mycpv

					try:
						self._eval_use_flags(mycpv, d)
					except portage.exception.InvalidDependString:
						writemsg(_("!!! Invalid binary package: '%s'\n") % \
							self.getname(mycpv), noiselevel=-1)
						self.dbapi.cpv_remove(mycpv)
						del pkg_paths[_instance_key(mycpv)]

					# record location if it's non-default
					if mypath != mycpv + ".tbz2":
						d["PATH"] = mypath
					else:
						d.pop("PATH", None)
					metadata[_instance_key(mycpv)] = d

			for instance_key in list(metadata):
				if instance_key not in pkg_paths:
					del metadata[instance_key]

			# Do not bother to write the Packages index if $PKGDIR/All/ exists
			# since it will provide no benefit due to the need to read CATEGORY
			# from xpak.
			if update_pkgindex and os.access(self.pkgdir, os.W_OK):
				del pkgindex.packages[:]
				pkgindex.packages.extend(iter(metadata.values()))
				self._update_pkgindex_header(pkgindex.header)
				self._pkgindex_write(pkgindex)

		if getbinpkgs and not self.settings.get("PORTAGE_BINHOST"):
			writemsg(_("!!! PORTAGE_BINHOST unset, but use is requested.\n"),
				noiselevel=-1)

		if not getbinpkgs or 'PORTAGE_BINHOST' not in self.settings:
			self.populated=1
			return
		self._remotepkgs = {}
		for base_url in self.settings["PORTAGE_BINHOST"].split():
			parsed_url = urlparse(base_url)
			host = parsed_url.netloc
			port = parsed_url.port
			user = None
			passwd = None
			user_passwd = ""
			if "@" in host:
				user, host = host.split("@", 1)
				user_passwd = user + "@"
				if ":" in user:
					user, passwd = user.split(":", 1)
			port_args = []
			if port is not None:
				port_str = ":%s" % (port,)
				if host.endswith(port_str):
					host = host[:-len(port_str)]
			pkgindex_file = os.path.join(self.settings["EROOT"], CACHE_PATH, "binhost",
				host, parsed_url.path.lstrip("/"), "Packages")
			pkgindex = self._new_pkgindex()
			try:
				f = io.open(_unicode_encode(pkgindex_file,
					encoding=_encodings['fs'], errors='strict'),
					mode='r', encoding=_encodings['repo.content'],
					errors='replace')
				try:
					pkgindex.read(f)
				finally:
					f.close()
			except EnvironmentError as e:
				if e.errno != errno.ENOENT:
					raise
			local_timestamp = pkgindex.header.get("TIMESTAMP", None)
			try:
				download_timestamp = \
					float(pkgindex.header.get("DOWNLOAD_TIMESTAMP", 0))
			except ValueError:
				download_timestamp = 0
			remote_timestamp = None
			rmt_idx = self._new_pkgindex()
			proc = None
			tmp_filename = None
			try:
				# urlparse.urljoin() only works correctly with recognized
				# protocols and requires the base url to have a trailing
				# slash, so join manually...
				url = base_url.rstrip("/") + "/Packages"
				f = None

				try:
					ttl = float(pkgindex.header.get("TTL", 0))
				except ValueError:
					pass
				else:
					if download_timestamp and ttl and \
						download_timestamp + ttl > time.time():
						raise UseCachedCopyOfRemoteIndex()

				# Don't use urlopen for https, since it doesn't support
				# certificate/hostname verification (bug #469888).
				if parsed_url.scheme not in ('https',):
					try:
						f = _urlopen(url, if_modified_since=local_timestamp)
						if hasattr(f, 'headers') and f.headers.get('timestamp', ''):
							remote_timestamp = f.headers.get('timestamp')
					except IOError as err:
						if hasattr(err, 'code') and err.code == 304: # not modified (since local_timestamp)
							raise UseCachedCopyOfRemoteIndex()

						if parsed_url.scheme in ('ftp', 'http', 'https'):
							# This protocol is supposedly supported by urlopen,
							# so apparently there's a problem with the url
							# or a bug in urlopen.
							if self.settings.get("PORTAGE_DEBUG", "0") != "0":
								traceback.print_exc()

							raise
					except ValueError:
						raise ParseError("Invalid Portage BINHOST value '%s'"
							% url.lstrip())

				if f is None:

					path = parsed_url.path.rstrip("/") + "/Packages"

					if parsed_url.scheme == 'ssh':
						# Use a pipe so that we can terminate the download
						# early if we detect that the TIMESTAMP header
						# matches that of the cached Packages file.
						ssh_args = ['ssh']
						if port is not None:
							ssh_args.append("-p%s" % (port,))
						# NOTE: shlex evaluates embedded quotes
						ssh_args.extend(portage.util.shlex_split(
							self.settings.get("PORTAGE_SSH_OPTS", "")))
						ssh_args.append(user_passwd + host)
						ssh_args.append('--')
						ssh_args.append('cat')
						ssh_args.append(path)

						proc = subprocess.Popen(ssh_args,
							stdout=subprocess.PIPE)
						f = proc.stdout
					else:
						setting = 'FETCHCOMMAND_' + parsed_url.scheme.upper()
						fcmd = self.settings.get(setting)
						if not fcmd:
							fcmd = self.settings.get('FETCHCOMMAND')
							if not fcmd:
								raise EnvironmentError("FETCHCOMMAND is unset")

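						# Fall back to FETCHCOMMAND, downloading the index
						# into a temporary file that is unlinked below.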
						fd, tmp_filename = tempfile.mkstemp()
						tmp_dirname, tmp_basename = os.path.split(tmp_filename)
						os.close(fd)

						fcmd_vars = {
							"DISTDIR": tmp_dirname,
							"FILE": tmp_basename,
							"URI": url
						}

						for k in ("PORTAGE_SSH_OPTS",):
							v = self.settings.get(k)
							if v is not None:
								fcmd_vars[k] = v

						success = portage.getbinpkg.file_get(
							fcmd=fcmd, fcmd_vars=fcmd_vars)
						if not success:
							raise EnvironmentError("%s failed" % (setting,))
						f = open(tmp_filename, 'rb')

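				# Decode the byte stream lazily, so that the header can be
				# inspected before the (potentially large) body is consumed.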
				f_dec = codecs.iterdecode(f,
					_encodings['repo.content'], errors='replace')
				try:
					rmt_idx.readHeader(f_dec)
					if not remote_timestamp: # in case it had not been read from HTTP header
						remote_timestamp = rmt_idx.header.get("TIMESTAMP", None)
					if not remote_timestamp:
						# no timestamp in the header, something's wrong
						pkgindex = None
						writemsg(_("\n\n!!! Binhost package index "
							"has no TIMESTAMP field.\n"), noiselevel=-1)
					else:
						if not self._pkgindex_version_supported(rmt_idx):
							writemsg(_("\n\n!!! Binhost package index version"
								" is not supported: '%s'\n") %
								rmt_idx.header.get("VERSION"), noiselevel=-1)
							pkgindex = None
						elif local_timestamp != remote_timestamp:
							rmt_idx.readBody(f_dec)
							pkgindex = rmt_idx
				finally:
					# Timeout after 5 seconds, in case close() blocks
					# indefinitely (see bug #350139).
					try:
						try:
							AlarmSignal.register(5)
							f.close()
						finally:
							AlarmSignal.unregister()
					except AlarmSignal:
						writemsg("\n\n!!! %s\n" % \
							_("Timed out while closing connection to binhost"),
							noiselevel=-1)
			except UseCachedCopyOfRemoteIndex:
				writemsg_stdout("\n")
				writemsg_stdout(
					colorize("GOOD", _("Local copy of remote index is up-to-date and will be used.")) + \
					"\n")
				rmt_idx = pkgindex
			except EnvironmentError as e:
				writemsg(_("\n\n!!! Error fetching binhost package" \
					" info from '%s'\n") % _hide_url_passwd(base_url))
				# With Python 2, the EnvironmentError message may
				# contain bytes or unicode, so use _unicode to ensure
				# safety with all locales (bug #532784).
				try:
					error_msg = _unicode(e)
				except UnicodeDecodeError as uerror:
					error_msg = _unicode(uerror.object,
						encoding='utf_8', errors='replace')
				writemsg("!!! %s\n\n" % error_msg)
				del e
				pkgindex = None
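
			# Clean up the ssh child process and the temporary download
			# file, if any.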
			if proc is not None:
				if proc.poll() is None:
					proc.kill()
					proc.wait()
				proc = None
			if tmp_filename is not None:
				try:
					os.unlink(tmp_filename)
				except OSError:
					pass
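
			# Persist the index that will be used (freshly downloaded, or
			# the revalidated cached copy), stamping DOWNLOAD_TIMESTAMP
			# for the TTL check above.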
			if pkgindex is rmt_idx:
				pkgindex.modified = False # don't update the header
				pkgindex.header["DOWNLOAD_TIMESTAMP"] = "%d" % time.time()
				try:
					ensure_dirs(os.path.dirname(pkgindex_file))
					f = atomic_ofstream(pkgindex_file)
					pkgindex.write(f)
					f.close()
				except (IOError, PortageException):
					if os.access(os.path.dirname(pkgindex_file), os.W_OK):
						raise
					# The current user doesn't have permission to cache the
					# file, but that's alright.
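			# Inject the remote entries into the dbapi, skipping instance
			# keys already claimed by a local package or an earlier binhost.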
			if pkgindex:
				remote_base_uri = pkgindex.header.get("URI", base_url)
				for d in pkgindex.packages:
					cpv = _pkg_str(d["CPV"], metadata=d,
						settings=self.settings)
					instance_key = _instance_key(cpv)
					# Local package instances override remote instances
					# with the same instance_key.
					if instance_key in metadata:
						continue

					d["CPV"] = cpv
					d["BASE_URI"] = remote_base_uri
					d["PKGINDEX_URI"] = url
					self._remotepkgs[instance_key] = d
					metadata[instance_key] = d
					self.dbapi.cpv_inject(cpv)

				self._remote_has_index = True

		self.populated = 1