Example #1
	def _vcs_autoadd(self):
		myunadded = self.vcs_settings.changes.unadded
		myautoadd = []
		if myunadded:
			for x in range(len(myunadded) - 1, -1, -1):
				xs = myunadded[x].split("/")
				if self.repo_settings.repo_config.find_invalid_path_char(myunadded[x]) != -1:
					# The Manifest excludes this file,
					# so it's safe to ignore.
					del myunadded[x]
				elif xs[-1] == "files":
					print("!!! files dir is not added! Please correct this.")
					sys.exit(-1)
				elif xs[-1] == "Manifest":
					# It's a manifest... auto add
					myautoadd += [myunadded[x]]
					del myunadded[x]

		if myunadded:
			print(red(
				"!!! The following files are in your local tree"
				" but are not added to the master"))
			print(red(
				"!!! tree. Please remove them from the local tree"
				" or add them to the master tree."))
			for x in myunadded:
				print("   ", x)
			print()
			print()
			sys.exit(1)
		return myautoadd
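The color helpers called throughout these examples (red, green, blue, yellow, bold, turquoise, darkgreen) wrap text in ANSI escape codes; in the Gentoo snippets they normally come from portage.output. The stand-ins below are only a minimal sketch for reading the snippets outside portage, not portage's real implementation:

# Hypothetical minimal stand-ins for the color helpers these examples call.
# The real snippets import them, e.g. `from portage.output import red, green`.
def _ansi(code):
	def colorize(text):
		return "\033[%sm%s\033[0m" % (code, text)
	return colorize

red, green, yellow, blue = _ansi("31"), _ansi("32"), _ansi("33"), _ansi("34")

print(red("!!! example error text"))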
Example #2
def get_best_match(cpv, cp, logger):
	"""Tries to find another version of the pkg with the same slot
	as the deprecated installed version.  Failing that attempt to get any version
	of the same app

	@param cpv: string
	@param cp: string
	@rtype tuple: ([cpv,...], SLOT)
	"""

	slot = portage.db[portage.root]["vartree"].dbapi.aux_get(cpv, ["SLOT"])[0]
	logger.warning('\t%s "%s" %s.' % (yellow('* Warning:'), cpv,bold('ebuild not found.')))
	logger.debug('\tget_best_match(); Looking for %s:%s' %(cp, slot))
	try:
		match = portdb.match('%s:%s' %(cp, slot))
	except portage.exception.InvalidAtom:
		match = None

	if not match:
		logger.warning('\t' + red('!!') + ' ' + yellow(
			'Could not find ebuild for %s:%s' %(cp, slot)))
		slot = ['']
		match = portdb.match(cp)
		if not match:
			logger.warning('\t' + red('!!') + ' ' +
				yellow('Could not find ebuild for ' + cp))
	return match, slot
Example #3
	def check(self, checkdir, repolevel):
		'''Runs checks on the package metadata.xml file

		@param checkdir: string, path
		@param repolevel: integer
		@return boolean, False == bad metadata
		'''
		if not self.capable:
			if self.options.xml_parse or repolevel == 3:
				print("%s sorry, xmllint is needed.  failing\n" % red("!!!"))
				sys.exit(1)
			return True
		# xmllint can produce garbage output even on success, so only dump
		# the output when it fails.
		st, out = repoman_getstatusoutput(
			self.binary + " --nonet --noout --dtdvalid %s %s" % (
				portage._shell_quote(self.metadata_dtd),
				portage._shell_quote(
					os.path.join(checkdir, "metadata.xml"))))
		if st != os.EX_OK:
			print(red("!!!") + " metadata.xml is invalid:")
			for z in out.splitlines():
				print(red("!!! ") + z)
			return False
		return True
Example #4
    def detect_conflicts(options):
        """Determine if the checkout has problems like cvs conflicts.

		If you want more vcs support here just keep adding if blocks...
		This could be better.

		TODO(antarus): Also this should probably not call sys.exit() as
		repoman is run on >1 packages and one failure should not cause
		subsequent packages to fail.

		Args:
			vcs - A string identifying the version control system in use
		Returns: boolean
			(calls sys.exit on fatal problems)
		"""

        cmd = "svn status -u 2>&1 | egrep -v '^.  +.*/digest-[^/]+' | head -n-1"
        msg = "Performing a %s with a little magic grep to check for updates." % green("svn status -u")

        logging.info(msg)
        # Use Popen instead of getstatusoutput(), in order to avoid
        # unicode handling problems (see bug #310789).
        args = [BASH_BINARY, "-c", cmd]
        args = [_unicode_encode(x) for x in args]
        proc = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        out = _unicode_decode(proc.communicate()[0])
        proc.wait()
        mylines = out.splitlines()
        myupdates = []
        for line in mylines:
            if not line:
                continue

            # [ ] Unmodified (SVN)	[U] Updates		[P] Patches
            # [M] Modified			[A] Added		[R] Removed / Replaced
            # [D] Deleted
            if line[0] not in " UPMARD":
                # Stray Manifest is fine, we will readd it anyway.
                if line[0] == "?" and line[1:].lstrip() == "Manifest":
                    continue
                logging.error(
                    red("!!! Please fix the following issues reported " "from cvs: %s" % green("(U,P,M,A,R,D are ok)"))
                )
                logging.error(red("!!! Note: This is a pretend/no-modify pass..."))
                logging.error(out)
                sys.exit(1)
            elif line[8] == "*":
                myupdates.append(line[9:].lstrip(" 1234567890"))

        if myupdates:
            logging.info(green("Fetching trivial updates..."))
            if options.pretend:
                logging.info("(svn update " + " ".join(myupdates) + ")")
                retval = os.EX_OK
            else:
                retval = os.system("svn update " + " ".join(myupdates))
            if retval != os.EX_OK:
                logging.fatal("!!! svn exited with an error. Terminating.")
                sys.exit(retval)
        return False
Example #5
 def check_gui_bindings(self):
     "check for availability of SoGui bindings and removes the not available ones"
     if sys.platform == "win32":
         self.MODULES.pop('soxt', None)
         self.MODULES.pop('sogtk', None)
         print(blue("Checking for SoWin..."))
         if not os.path.exists(
                 os.path.join(os.getenv("COINDIR"), "include", "Inventor",
                              "Win", "SoWin.h")):
             self.MODULES.pop('sowin', None)
             print(
                 red("COINDIR\\include\\Inventor\\Win\\SoWin.h not found. (SoWin bindings won't be built)"
                     ))
         print(blue("Checking for QTDIR environment variable..."))
         if os.getenv("QTDIR"):
             print(blue(os.getenv("QTDIR")))
         else:
             self.MODULES.pop('soqt', None)
             print(red("not set. (SoQt bindings won't be built)"))
     else:
         for gui in self.SOGUI:
             if gui not in self.MODULES:
                 continue
             gui_config_cmd = self.MODULES[gui][1]
             if not self.check_cmd_exists(gui_config_cmd):
                 self.MODULES.pop(gui, None)
             else:
                 print(blue("Checking for %s version..." % gui))
                 version = self.do_os_popen("%s --version" % gui_config_cmd)
                 print(blue("%s" % version))
Example #6
def get_slotted_cps(cpvs, logger):
	"""Uses portage to reduce the cpv list into a cp:slot list and returns it
	"""
	from portage.versions import catpkgsplit
	from portage import portdb

	cps = []
	for cpv in cpvs:
		parts = catpkgsplit(cpv)
		if not parts:
			logger.warning(('\t' + red("Failed to split the following pkg: "
				"%s, not a valid cat/pkg-ver" %cpv)))
			continue

		cp = parts[0] + '/' + parts[1]
		try:
			slot = portdb.aux_get(cpv, ["SLOT"])
		except KeyError:
			match, slot = get_best_match(cpv, cp, logger)
			if not match:
				logger.warning('\t' + red("Installed package: "
					"%s is no longer available" %cp))
				continue

		if slot[0]:
			cps.append(cp + ":" + slot[0])
		else:
			cps.append(cp)

	return cps
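For reference, the catpkgsplit() call used above splits a full cpv into (category, package, version, revision) and returns None for strings that are not a valid cat/pkg-ver; a quick sketch, assuming portage is installed and using a made-up atom:

from portage.versions import catpkgsplit

parts = catpkgsplit("app-editors/vim-9.0.1000-r1")
# parts == ('app-editors', 'vim', '9.0.1000', 'r1'); invalid input yields None
cp = parts[0] + '/' + parts[1]  # 'app-editors/vim', as rebuilt in get_slotted_cps()
print(cp)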
Example #7
	def detect_conflicts(options):
		"""Determine if the checkout has cvs conflicts.

		TODO(antarus): Also this should probably not call sys.exit() as
		repoman is run on >1 packages and one failure should not cause
		subsequent packages to fail.

		Returns:
			None (calls sys.exit on fatal problems)
		"""

		cmd = ("cvs -n up 2>/dev/null | "
				"egrep '^[^\?] .*' | "
				"egrep -v '^. .*/digest-[^/]+|^cvs server: .* -- ignored$'")
		msg = ("Performing a %s with a little magic grep to check for updates."
				% green("cvs -n up"))

		logging.info(msg)
		# Use Popen instead of getstatusoutput(), in order to avoid
		# unicode handling problems (see bug #310789).
		args = [BASH_BINARY, "-c", cmd]
		args = [_unicode_encode(x) for x in args]
		proc = subprocess.Popen(
			args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
		out = _unicode_decode(proc.communicate()[0])
		proc.wait()
		mylines = out.splitlines()
		myupdates = []
		for line in mylines:
			if not line:
				continue

			# [ ] Unmodified (SVN)	[U] Updates		[P] Patches
			# [M] Modified			[A] Added		[R] Removed / Replaced
			# [D] Deleted
			if line[0] not in " UPMARD":
				# Stray Manifest is fine, we will readd it anyway.
				if line[0] == '?' and line[1:].lstrip() == 'Manifest':
					continue
				logging.error(red(
					"!!! Please fix the following issues reported "
					"from cvs: %s" % green("(U,P,M,A,R,D are ok)")))
				logging.error(red(
					"!!! Note: This is a pretend/no-modify pass..."))
				logging.error(out)
				sys.exit(1)
			elif line[0] in "UP":
				myupdates.append(line[2:])

		if myupdates:
			logging.info(green("Fetching trivial updates..."))
			if options.pretend:
				logging.info("(cvs update " + " ".join(myupdates) + ")")
				retval = os.EX_OK
			else:
				retval = os.system("cvs update " + " ".join(myupdates))
			if retval != os.EX_OK:
				logging.fatal("!!! cvs exited with an error. Terminating.")
				sys.exit(retval)
		return False
Example #8
    def _vcs_autoadd(self, myunadded):
        myautoadd = []
        if myunadded:
            for x in range(len(myunadded) - 1, -1, -1):
                xs = myunadded[x].split("/")
                if self.repo_settings.repo_config.find_invalid_path_char(
                        myunadded[x]) != -1:
                    # The Manifest excludes this file,
                    # so it's safe to ignore.
                    del myunadded[x]
                elif xs[-1] == "files":
                    print("!!! files dir is not added! Please correct this.")
                    sys.exit(-1)
                elif xs[-1] == "Manifest":
                    # It's a manifest... auto add
                    myautoadd += [myunadded[x]]
                    del myunadded[x]

        if myunadded:
            print(
                red("!!! The following files are in your local tree"
                    " but are not added to the master"))
            print(
                red("!!! tree. Please remove them from the local tree"
                    " or add them to the master tree."))
            for x in myunadded:
                print("   ", x)
            print()
            print()
            sys.exit(1)
        return myautoadd
Example #9
	def detect_conflicts(options):
		"""Determine if the checkout has cvs conflicts.

		TODO(antarus): Also this should probably not call sys.exit() as
		repoman is run on >1 packages and one failure should not cause
		subsequent packages to fail.

		Returns:
			None (calls sys.exit on fatal problems)
		"""

		cmd = (r"cvs -n up 2>/dev/null | "
				r"egrep '^[^\?] .*' | "
				r"egrep -v '^. .*/digest-[^/]+|^cvs server: .* -- ignored$'")
		msg = ("Performing a %s with a little magic grep to check for updates."
				% green("cvs -n up"))

		logging.info(msg)
		# Use Popen instead of getstatusoutput(), in order to avoid
		# unicode handling problems (see bug #310789).
		args = [BASH_BINARY, "-c", cmd]
		args = [_unicode_encode(x) for x in args]
		proc = subprocess.Popen(
			args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
		out = _unicode_decode(proc.communicate()[0])
		proc.wait()
		mylines = out.splitlines()
		myupdates = []
		for line in mylines:
			if not line:
				continue

			# [ ] Unmodified (SVN)	[U] Updates		[P] Patches
			# [M] Modified			[A] Added		[R] Removed / Replaced
			# [D] Deleted
			if line[0] not in " UPMARD":
				# Stray Manifest is fine, we will readd it anyway.
				if line[0] == '?' and line[1:].lstrip() == 'Manifest':
					continue
				logging.error(red(
					"!!! Please fix the following issues reported "
					"from cvs: %s" % green("(U,P,M,A,R,D are ok)")))
				logging.error(red(
					"!!! Note: This is a pretend/no-modify pass..."))
				logging.error(out)
				sys.exit(1)
			elif line[0] in "UP":
				myupdates.append(line[2:])

		if myupdates:
			logging.info(green("Fetching trivial updates..."))
			if options.pretend:
				logging.info("(cvs update " + " ".join(myupdates) + ")")
				retval = os.EX_OK
			else:
				retval = os.system("cvs update " + " ".join(myupdates))
			if retval != os.EX_OK:
				logging.fatal("!!! cvs exited with an error. Terminating.")
				sys.exit(retval)
		return False
Example #10
    def check_gui_bindings(self):
        '''check for availability of SoGui bindings and removes the not available ones'''

        print(yellow('\ncheck_gui_bindings is not supported in this version'))
        print(yellow('soqt is built by default'))
        print(yellow('make sure you have installed the soqt library + headers\n'))
        return #TODO

        if sys.platform == "_win32":
            self.MODULES.pop('soxt', None)
            self.MODULES.pop('sogtk', None)
            print(blue("Checking for SoWin..."))
            if not os.path.exists(os.path.join(os.getenv("COINDIR"), "include", "Inventor", "Win", "SoWin.h")):
                self.MODULES.pop('sowin', None)
                print(red("COINDIR\\include\\Inventor\\Win\\SoWin.h not found. (SoWin bindings won't be built)"))
            print(blue("Checking for QTDIR environment variable..."))
            if os.getenv("QTDIR"):
                print(blue(os.getenv("QTDIR")))
            else:
                self.MODULES.pop('soqt', None)
                print(red("not set. (SoQt bindings won't be built)"))
        else:
            for gui in self.SOGUI:
                if gui not in self.MODULES:
                    continue
                gui_config_cmd = self.MODULES[gui][1]
                if not self.check_cmd_exists(gui_config_cmd):
                    self.MODULES.pop(gui, None)
                else:
                    print(blue("Checking for %s version..." % gui))
                    version = self.do_os_popen("%s --version" % gui_config_cmd)
                    print(blue("%s" % version))
Example #11
    def check_with_cmake(self):
        dirname = os.path.dirname(__file__)
        cmake_command = ['cmake', dirname]
        try:
            cmake_command += ['-G', os.environ['GENERATOR']]
        except KeyError:
            pass
        print(yellow('calling: ' + cmake_command[0] + ' ' + cmake_command[1]))
        cmake = subprocess.Popen(cmake_command,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE)
        cmake_out, cmake_err = cmake.communicate()
        coin_vars = [
            'COIN_FOUND', 'COIN_VERSION', 'COIN_INCLUDE_DIR', 'COIN_LIB_DIR'
        ]
        soqt_vars = [
            'SOQT_FOUND', 'SOQT_VERSION', 'SOQT_INCLUDE_DIR', 'SOQT_LIB_DIR'
        ]
        config_dict = {}
        if cmake.returncode == 0:
            for line in cmake_out.decode("utf-8").split("\n"):
                for var in coin_vars + soqt_vars:
                    if var in line:
                        line = (line.replace('-- ' + var,
                                             '').replace(': ',
                                                         '').replace('\n', ''))
                        config_dict[var] = line

        print(yellow('\nchecking for COIN via cmake'))
        for key in coin_vars:
            if key in config_dict:
                print(blue(key + ': ' + config_dict[key]))

        print(yellow('\nchecking for SOQT via cmake'))
        for key in soqt_vars:
            if key in config_dict:
                print(blue(key + ': ' + config_dict[key]))

        if config_dict.get('SOQT_FOUND', 'false') == 'false':
            pivy_build.MODULES.pop('soqt')
            print(red("\ndisable soqt, because cmake couldn't find it"))
        else:
            try:
                import qtinfo
                self.QTINFO = qtinfo.QtInfo()
            except Exception as e:
                import traceback
                print(
                    red("\ndisable soqt, because there was a problem running qtinfo (needs qmake)"
                        ))
                print(red("-" * 60))
                print(red(traceback.print_exc()))
                print(red("-" * 60))
                pivy_build.MODULES.pop('soqt')

        self.cmake_config_dict = config_dict
        if self.cmake_config_dict.get('COIN_FOUND', 'false') == 'false':
            raise (RuntimeError(
                'coin was not found, but you need coin to build pivy'))
Example #12
def detect_vcs_conflicts(options, vcs):
	"""Determine if the checkout has problems like cvs conflicts.
	
	If you want more vcs support here just keep adding if blocks...
	This could be better.
	
	TODO(antarus): Also this should probably not call sys.exit() as
	repoman is run on >1 packages and one failure should not cause
	subsequent packages to fail.
	
	Args:
		vcs - A string identifying the version control system in use
	Returns:
		None (calls sys.exit on fatal problems)
	"""
	retval = ("","")
	if vcs == 'cvs':
		logging.info("Performing a " + output.green("cvs -n up") + \
			" with a little magic grep to check for updates.")
		retval = subprocess_getstatusoutput("cvs -n up 2>/dev/null | " + \
			"egrep '^[^\?] .*' | " + \
			"egrep -v '^. .*/digest-[^/]+|^cvs server: .* -- ignored$'")
	if vcs == 'svn':
		logging.info("Performing a " + output.green("svn status -u") + \
			" with a little magic grep to check for updates.")
		retval = subprocess_getstatusoutput("svn status -u 2>&1 | " + \
			"egrep -v '^.  +.*/digest-[^/]+' | " + \
			"head -n-1")

	if vcs in ['cvs', 'svn']:
		mylines = retval[1].splitlines()
		myupdates = []
		for line in mylines:
			if not line:
				continue
			if line[0] not in " UPMARD": # unmodified(svn),Updates,Patches,Modified,Added,Removed/Replaced(svn),Deleted(svn)
				# Stray Manifest is fine, we will readd it anyway.
				if line[0] == '?' and line[1:].lstrip() == 'Manifest':
					continue
				logging.error(red("!!! Please fix the following issues reported " + \
					"from cvs: ")+green("(U,P,M,A,R,D are ok)"))
				logging.error(red("!!! Note: This is a pretend/no-modify pass..."))
				logging.error(retval[1])
				sys.exit(1)
			elif vcs == 'cvs' and line[0] in "UP":
				myupdates.append(line[2:])
			elif vcs == 'svn' and line[8] == '*':
				myupdates.append(line[9:].lstrip(" 1234567890"))

		if myupdates:
			logging.info(green("Fetching trivial updates..."))
			if options.pretend:
				logging.info("(" + vcs + " update " + " ".join(myupdates) + ")")
				retval = os.EX_OK
			else:
				retval = os.system(vcs + " update " + " ".join(myupdates))
			if retval != os.EX_OK:
				logging.fatal("!!! " + vcs + " exited with an error. Terminating.")
				sys.exit(retval)
Example #13
 def check_cmd_exists(self, cmd):
     "return the path of the specified command if it exists"
     print blue("Checking for %s..." % cmd),
     for path in os.environ['PATH'].split(os.path.pathsep):
         if os.path.exists(os.path.join(path, cmd)):
             print blue("'%s'" % os.path.join(path, cmd))
             return 1
     print red("not found.")
     return 0
Example #14
def detect_vcs_conflicts(options, vcs):
	"""Determine if the checkout has problems like cvs conflicts.
	
	If you want more vcs support here just keep adding if blocks...
	This could be better.
	
	TODO(antarus): Also this should probably not call sys.exit() as
	repoman is run on >1 packages and one failure should not cause
	subsequent packages to fail.
	
	Args:
		vcs - A string identifying the version control system in use
	Returns:
		None (calls sys.exit on fatal problems)
	"""
	retval = ("","")
	if vcs == 'cvs':
		logging.info("Performing a " + output.green("cvs -n up") + \
			" with a little magic grep to check for updates.")
		retval = subprocess_getstatusoutput("cvs -n up 2>&1 | " + \
			"egrep '^[^\?] .*' | " + \
			"egrep -v '^. .*/digest-[^/]+|^cvs server: .* -- ignored$'")
	if vcs == 'svn':
		logging.info("Performing a " + output.green("svn status -u") + \
			" with a little magic grep to check for updates.")
		retval = subprocess_getstatusoutput("svn status -u 2>&1 | " + \
			"egrep -v '^.  +.*/digest-[^/]+' | " + \
			"head -n-1")

	if vcs in ['cvs', 'svn']:
		mylines = retval[1].splitlines()
		myupdates = []
		for line in mylines:
			if not line:
				continue
			if line[0] not in "UPMARD": # Updates,Patches,Modified,Added,Removed/Replaced(svn),Deleted(svn)
				logging.error(red("!!! Please fix the following issues reported " + \
					"from cvs: ")+green("(U,P,M,A,R,D are ok)"))
				logging.error(red("!!! Note: This is a pretend/no-modify pass..."))
				logging.error(retval[1])
				sys.exit(1)
			elif vcs == 'cvs' and line[0] in "UP":
				myupdates.append(line[2:])
			elif vcs == 'svn' and line[8] == '*':
				myupdates.append(line[9:].lstrip(" 1234567890"))

		if myupdates:
			logging.info(green("Fetching trivial updates..."))
			if options.pretend:
				logging.info("(" + vcs + " update " + " ".join(myupdates) + ")")
				retval = os.EX_OK
			else:
				retval = os.system(vcs + " update " + " ".join(myupdates))
			if retval != os.EX_OK:
				logging.fatal("!!! " + vcs + " exited with an error. Terminating.")
				sys.exit(retval)
Example #15
def check_profiles(profiles, archlist):
	for x in archlist:
		if x[0] == "~":
			continue
		if x not in profiles:
			print(red(
				"\"%s\" doesn't have a valid profile listed in profiles.desc." % x))
			print(red(
				"You need to either \"cvs update\" your profiles dir"
				" or follow this"))
			print(red(
				"up with the " + x + " team."))
			print()
Example #16
	def _vcs_deleted(self, mydeleted):
		if self.vcs_settings.vcs == "hg" and mydeleted:
			print(red(
				"!!! The following files are removed manually"
				" from your local tree but are not"))
			print(red(
				"!!! removed from the repository."
				" Please remove them, using \"hg remove [FILES]\"."))
			for x in mydeleted:
				print("   ", x)
			print()
			print()
			sys.exit(1)
Example #17
def check_profiles(profiles, archlist):
	for x in archlist:
		if x[0] == "~":
			continue
		if x not in profiles:
			print(red(
				"\"%s\" doesn't have a valid profile listed in profiles.desc." % x))
			print(red(
				"You need to either \"cvs update\" your profiles dir"
				" or follow this"))
			print(red(
				"up with the " + x + " team."))
			print()
Example #18
 def _vcs_deleted(self, mydeleted):
     if self.vcs_settings.vcs == "hg" and mydeleted:
         print(
             red("!!! The following files are removed manually"
                 " from your local tree but are not"))
         print(
             red("!!! removed from the repository."
                 " Please remove them, using \"hg remove [FILES]\"."))
         for x in mydeleted:
             print("   ", x)
         print()
         print()
         sys.exit(1)
Example #19
	def _vcs_deleted(self):
		if self.vcs_settings.changes.has_deleted:
			print(red(
				"!!! The following files are removed manually"
				" from your local tree but are not"))
			print(red(
				"!!! removed from the repository."
				" Please remove them, using \"%s remove [FILES]\"."
				% self.vcs_settings.vcs))
			for x in self.vcs_settings.changes.deleted:
				print("   ", x)
			print()
			print()
			sys.exit(1)
Example #20
 def _vcs_deleted(self):
     if self.vcs_settings.changes.has_deleted:
         print(
             red("!!! The following files are removed manually"
                 " from your local tree but are not"))
         print(
             red("!!! removed from the repository."
                 " Please remove them, using \"%s remove [FILES]\"." %
                 self.vcs_settings.vcs))
         for x in self.vcs_settings.changes.deleted:
             print("   ", x)
         print()
         print()
         sys.exit(1)
Example #21
def assign_packages(broken, logger, settings):
	''' Finds and returns packages that owns files placed in broken.
		Broken is list of files
	'''
	stime = current_milli_time()

	broken_matcher = _file_matcher()
	for filename in broken:
		broken_matcher.add(filename)

	assigned_pkgs = set()
	assigned_filenames = set()
	for group in os.listdir(settings['PKG_DIR']):
		grppath = settings['PKG_DIR'] + group
		if not os.path.isdir(grppath):
			continue
		for pkg in os.listdir(grppath):
			pkgpath = settings['PKG_DIR'] + group + '/' + pkg
			if not os.path.isdir(pkgpath):
				continue
			f = pkgpath + '/CONTENTS'
			if os.path.exists(f):
				contents_matcher = _file_matcher()
				try:
					with io.open(f, 'r', encoding='utf_8') as cnt:
						for line in cnt.readlines():
							m = re.match('^obj (/[^ ]+)', line)
							if m is not None:
								contents_matcher.add(m.group(1))
				except Exception as e:
					logger.warning(red(' !! Failed to read ' + f))
					logger.warning(red(' !! Error was:' + str(e)))
				else:
					for m in contents_matcher.intersection(broken_matcher):
						found = group+'/'+pkg
						assigned_pkgs.add(found)
						assigned_filenames.add(m)
						logger.info('\t' + green('* ') + m +
									' -> ' + bold(found))

	broken_filenames = set(broken)
	orphaned = broken_filenames.difference(assigned_filenames)
	ftime = current_milli_time()
	logger.debug("\tassign_packages(); assigned "
		"%d packages, %d orphans in %d milliseconds"
		% (len(assigned_pkgs), len(orphaned), ftime-stime))

	return (assigned_pkgs, orphaned)
Example #22
    def _get_repos(self, auto_sync_only=True, match_repos=None):
        msgs = []
        repos = self.emerge_config.target_config.settings.repositories
        if match_repos is not None:
            # Discard duplicate repository names or aliases.
            match_repos = set(match_repos)
            repos = self._match_repos(match_repos, repos)
            if len(repos) < len(match_repos):
                # Build a set of all the matched repos' names and aliases so we
                # can do a set difference for names that are missing.
                repo_names = set()
                for repo in repos:
                    repo_names.add(repo.name)
                    if repo.aliases is not None:
                        repo_names.update(repo.aliases)
                missing = match_repos - repo_names
                if missing:
                    msgs.append(
                        red(" * ") +
                        "The specified repo(s) were not found: %s" %
                        (" ".join(repo_name for repo_name in missing)) +
                        "\n   ...returning")
                    return (False, repos, msgs)

        if auto_sync_only:
            repos = self._filter_auto(repos)

        sync_disabled = [repo for repo in repos if repo.sync_type is None]
        if sync_disabled:
            repos = [repo for repo in repos if repo.sync_type is not None]
            if match_repos is not None:
                msgs.append(
                    red(" * ") +
                    "The specified repo(s) have sync disabled: %s" %
                    " ".join(repo.name
                             for repo in sync_disabled) + "\n   ...returning")
                return (False, repos, msgs)

        missing_sync_uri = [repo for repo in repos if repo.sync_uri is None]
        if missing_sync_uri:
            repos = [repo for repo in repos if repo.sync_uri is not None]
            msgs.append(
                red(" * ") + "The specified repo(s) are missing sync-uri: %s" %
                " ".join(repo.name
                         for repo in missing_sync_uri) + "\n   ...returning")
            return (False, repos, msgs)

        return (True, repos, msgs)
Example #23
def save_cache(logger, to_save=None, temp_path=DEFAULTS['DEFAULT_TMP_DIR']):
	''' Tries to store caching information.
		@param logger
		@param to_save have to be dict with keys:
			libraries, la_libraries, libraries_links and binaries
	'''

	if to_save is None:
		to_save = {}

# TODO: Don't blindly make the cache directory, see Bug 203414
#	if not os.path.exists(temp_path):
#		os.makedirs(temp_path)

	try:
		_file = open(os.path.join(temp_path, 'timestamp'), 'w')
		_file.write(str(int(time.time())))
		_file.close()

		for key, val in list(to_save.items()):
			_file = open(os.path.join(temp_path, key), 'w')
			for line in val:
				_file.write(line + '\n')
			_file.close()
	except Exception as ex:
		logger.warn(red('Could not save cache: %s' %str(ex)))
Example #24
def masking(mask):
    """Returns a 'masked by' string."""
    if "package.mask" in mask or "profile" in mask:
        # use porthole wrap style to help clarify meaning
        return output.red("M[" + mask[0] + "]")
    if mask:  # note: the original "mask is not []" test was always true
        for status in mask:
            if "keyword" in status:
                # keyword masked | " [missing keyword] " <=looks better
                return output.blue("[" + status + "]")
            if status in archlist:
                return output.green(status)
            if "unknown" in status:
                return output.yellow(status)
        return output.red(status)
    return ""
Example #25
def masking(mask):
	"""Returns a 'masked by' string."""
	if 'package.mask' in mask or 'profile' in mask:
		# use porthole wrap style to help clarify meaning
		return output.red("M["+mask[0]+"]")
	if mask:  # note: the original "mask is not []" test was always true
		for status in mask:
			if 'keyword' in status:
				# keyword masked | " [missing keyword] " <=looks better
				return output.blue("["+status+"]")
			if status in archlist:
				return output.green(status)
			if 'unknown' in status:
				return output.yellow(status)
		return output.red(status)
	return ''
Example #26
def search_ebuilds(path, portdir=True, searchdef="", repo_num="",
        config=None, data=None):
    pv = ""
    pkgs = []
    nr = len(data['ebuilds']) + 1

    if portdir:
        rep = darkgreen("Portage    ")
    else:
        rep = red("Overlay "+str(repo_num)+"  ")

    if isdir(path):
        filelist = listdir(path)

        for file in filelist:
            if file[-7:] == ".ebuild":
                pv = file[:-7]
                pkgs.append(list(pkgsplit(pv)))
                pkgs[-1].append(path + file)
                if searchdef != "" and pv == searchdef:
                    data['defebuild'] = (searchdef, pkgs[-1][3])
        if not portdir:
            config['found_in_overlay'] = True
        pkgs.sort(key=cmp_sort_key(mypkgcmp))
        for pkg in pkgs:
            rev = ""
            if pkg[2] != "r0":
                rev = "-" + pkg[2]
            data['output'].append(" " + rep + " [" + bold(str(nr)) + "] " +
                pkg[0] + "-" + pkg[1] + rev + "\n")
            data['ebuilds'].append(pkg[len(pkg)-1])
            nr += 1
Example #27
def save_cache(logger, to_save={}, temp_path=DEFAULTS['DEFAULT_TMP_DIR']):
	''' Tries to store caching information.
		@param logger
		@param to_save have to be dict with keys:
			libraries, la_libraries, libraries_links and binaries
	'''

	if not os.path.exists(temp_path):
		os.makedirs(temp_path)

	try:
		_file = open(_unicode_encode(os.path.join(temp_path, 'timestamp'),
			encoding=_encodings['fs']), mode='w', encoding=_encodings['content'])
		_file.write(_unicode(int(time.time())))
		_file.close()

		for key,val in to_save.items():
			_file = open(_unicode_encode(os.path.join(temp_path, key),
				encoding=_encodings['fs']), mode='w',
				encoding=_encodings['content'])
			for line in val:
				_file.write(line + '\n')
			_file.close()
	except Exception as ex:
		logger.warning('\t' + red('Could not save cache: %s' %str(ex)))
Example #28
def save_cache(logger, to_save={}, temp_path=DEFAULTS["DEFAULT_TMP_DIR"]):
    """Tries to store caching information.
    @param logger
    @param to_save have to be dict with keys:
            libraries, la_libraries, libraries_links and binaries
    """

    if not os.path.exists(temp_path):
        os.makedirs(temp_path)

    try:
        _file = open(
            _unicode_encode(os.path.join(temp_path, "timestamp"),
                            encoding=_encodings["fs"]),
            mode="w",
            encoding=_encodings["content"],
        )
        _file.write(str(int(time.time())))
        _file.close()

        for key, val in to_save.items():
            _file = open(
                _unicode_encode(os.path.join(temp_path, key),
                                encoding=_encodings["fs"]),
                mode="w",
                encoding=_encodings["content"],
            )
            for line in val:
                _file.write(line + "\n")
            _file.close()
    except Exception as ex:
        logger.warning("\t" + red("Could not save cache: %s" % str(ex)))
Example #29
def save_cache(logger, to_save={}, temp_path=DEFAULTS['DEFAULT_TMP_DIR']):
    ''' Tries to store caching information.
		@param logger
		@param to_save have to be dict with keys:
			libraries, la_libraries, libraries_links and binaries
	'''

    if not os.path.exists(temp_path):
        os.makedirs(temp_path)

    try:
        _file = open(_unicode_encode(os.path.join(temp_path, 'timestamp'),
                                     encoding=_encodings['fs']),
                     mode='w',
                     encoding=_encodings['content'])
        _file.write(str(int(time.time())))
        _file.close()

        for key, val in to_save.items():
            _file = open(_unicode_encode(os.path.join(temp_path, key),
                                         encoding=_encodings['fs']),
                         mode='w',
                         encoding=_encodings['content'])
            for line in val:
                _file.write(line + '\n')
            _file.close()
    except Exception as ex:
        logger.warning('\t' + red('Could not save cache: %s' % str(ex)))
Example #30
def save_cache(logger, to_save=None, temp_path=DEFAULTS['DEFAULT_TMP_DIR']):
	''' Tries to store caching information.
		@param logger
		@param to_save have to be dict with keys: 
			libraries, la_libraries, libraries_links and binaries
	'''
	
	if to_save is None:
		to_save = {}

# TODO: Don't blindly make the cache directory, see Bug 203414
#	if not os.path.exists(temp_path):
#		os.makedirs(temp_path)

	try:
		_file = open(os.path.join(temp_path, 'timestamp'), 'w')
		_file.write(str(int(time.time())))
		_file.close()

		for key, val in list(to_save.items()):
			_file = open(os.path.join(temp_path, key), 'w')
			for line in val:
				_file.write(line + '\n')
			_file.close()
	except Exception as ex:
		logger.warn(red('Could not save cache: %s' %str(ex)))
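A hypothetical call sketch for the save_cache() variants above; the key names mirror the docstring, while the logger name, file paths, and temp_path value are made up for illustration:

import logging

logging.basicConfig(level=logging.INFO)
save_cache(
	logging.getLogger("revdep-rebuild"),
	to_save={
		"libraries": ["/usr/lib64/libfoo.so.1"],
		"la_libraries": [],
		"libraries_links": [],
		"binaries": ["/usr/bin/foo"],
	},
	temp_path="/tmp/revdep-cache",
)
# Writes a 'timestamp' file plus one plain-text file per key into temp_path
# (the directory must already exist for the Example #30 variant).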
Example #31
def keyword(string, stable=True, hard_masked=False):
	"""Returns a keyword string."""
	if stable:
		return output.green(string)
	if hard_masked:
		return output.red(string)
	# keyword masked:
	return output.blue(string)
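A usage sketch for keyword(), assuming the definition above and portage.output imported as `output`:

print(keyword("amd64"))                                   # stable       -> green
print(keyword("~amd64", stable=False))                    # keyword mask -> blue
print(keyword("-sparc", stable=False, hard_masked=True))  # hard masked  -> red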
Example #32
    def copy_and_swigify_headers(self, includedir, dirname, files):
        """Copy the header files to the local include directories. Add an
        #include line at the beginning for the SWIG interface files..."""

        for file in files:
            if not os.path.isfile(os.path.join(dirname, file)):
                continue

            if file[-2:] == ".i":
                file_i = os.path.join(dirname, file)
                file_h = os.path.join(dirname, file)[:-2] + ".h"

                if (not os.path.exists(file_h) and
                    os.path.exists(os.path.join(includedir, file_h))):
                    shutil.copyfile(os.path.join(includedir, file_h), file_h)
                    sys.stdout.write(' ' + turquoise(file_h))
                    fd = open(file_h, 'r+')
                    contents = fd.readlines()

                    ins_line_nr = -1
                    for line in contents:
                        ins_line_nr += 1
                        if line.find("#include ") != -1:
                            break

                    if ins_line_nr != -1:
                        contents.insert(ins_line_nr, self.pivy_header_include % (file_i))
                        fd.seek(0)
                        fd.writelines(contents)
                    else:
                        print blue("[") + red("failed") + blue("]")
                        sys.exit(1)
                    fd.close()
            # fixes for SWIG 1.3.21 and upwards
            # (mostly workarounding swig's preprocessor "function like macros"
            # preprocessor bug when no parameters are provided which then results
            # in no constructors being created in the wrapper)
            elif file[-4:] == ".fix":
                sys.stdout.write(' ' + red(os.path.join(dirname, file)[:-4]))
                shutil.copyfile(os.path.join(dirname, file),
                                os.path.join(dirname, file)[:-4])
            # had to introduce this because windows is a piece of crap
            elif sys.platform == "win32" and file[-6:] == ".win32":
                sys.stdout.write(' ' + red(os.path.join(dirname, file)[:-6]))
                shutil.copyfile(os.path.join(dirname, file),
                                os.path.join(dirname, file)[:-6])
Example #33
	def __init__(self, options=None, repoman_settings=None):
		if options.vcs:
			if options.vcs in ('cvs', 'svn', 'git', 'bzr', 'hg'):
				self.vcs = options.vcs
			else:
				self.vcs = None
		else:
			vcses = FindVCS()
			if len(vcses) > 1:
				print(red(
					'*** Ambiguous workdir -- more than one VCS found'
					' at the same depth: %s.' % ', '.join(vcses)))
				print(red(
					'*** Please either clean up your workdir'
					' or specify --vcs option.'))
				sys.exit(1)
			elif vcses:
				self.vcs = vcses[0]
			else:
				self.vcs = None

		if options.if_modified == "y" and self.vcs is None:
			logging.info(
				"Not in a version controlled repository; "
				"disabling --if-modified.")
			options.if_modified = "n"

		# Disable copyright/mtime check if vcs does not preserve mtime (bug #324075).
		self.vcs_preserves_mtime = self.vcs in ('cvs',)

		self.vcs_local_opts = repoman_settings.get(
			"REPOMAN_VCS_LOCAL_OPTS", "").split()
		self.vcs_global_opts = repoman_settings.get(
			"REPOMAN_VCS_GLOBAL_OPTS")
		if self.vcs_global_opts is None:
			if self.vcs in ('cvs', 'svn'):
				self.vcs_global_opts = "-q"
			else:
				self.vcs_global_opts = ""
		self.vcs_global_opts = self.vcs_global_opts.split()

		if options.mode == 'commit' and not options.pretend and not self.vcs:
			logging.info(
				"Not in a version controlled repository; "
				"enabling pretend mode.")
			options.pretend = True
Example #34
    def copy_and_swigify_headers(self, includedir, dirname, files):
        """Copy the header files to the local include directories. Add an
        #include line at the beginning for the SWIG interface files..."""
        for file in files:
            if not os.path.isfile(os.path.join(dirname, file)):
                continue

            if file[-2:] == ".i":
                file_i = os.path.join(dirname, file)
                file_h = os.path.join(dirname, file)[:-2] + ".h"

                if (not os.path.exists(file_h)
                        and os.path.exists(os.path.join(includedir, file_h))):
                    shutil.copyfile(os.path.join(includedir, file_h), file_h)
                    sys.stdout.write(' ' + turquoise(file_h))
                    fd = open(file_h, 'r+')
                    contents = fd.readlines()

                    ins_line_nr = -1
                    for line in contents:
                        ins_line_nr += 1
                        if line.find("#include ") != -1:
                            break

                    if ins_line_nr != -1:
                        contents.insert(ins_line_nr,
                                        self.pivy_header_include % (file_i))
                        fd.seek(0)
                        fd.writelines(contents)
                    else:
                        print(blue("[") + red("failed") + blue("]"))
                        sys.exit(1)
                    fd.close()
            # fixes for SWIG 1.3.21 and upwards
            # (mostly workarounding swig's preprocessor "function like macros"
            # preprocessor bug when no parameters are provided which then results
            # in no constructors being created in the wrapper)
            elif file[-4:] == ".fix":
                sys.stdout.write(' ' + red(os.path.join(dirname, file)[:-4]))
                shutil.copyfile(os.path.join(dirname, file),
                                os.path.join(dirname, file)[:-4])
            # had to introduce this because windows is a piece of crap
            elif sys.platform == "win32" and file[-6:] == ".win32":
                sys.stdout.write(' ' + red(os.path.join(dirname, file)[:-6]))
                shutil.copyfile(os.path.join(dirname, file),
                                os.path.join(dirname, file)[:-6])
Example #35
    def __str__(self):
        output = []

        if self.interactive:
            output.append(colorize("WARN", "I"))
        else:
            output.append(" ")

        if self.new or self.force_reinstall:
            if self.force_reinstall:
                output.append(red("r"))
            else:
                output.append(green("N"))
        else:
            output.append(" ")

        if self.new_slot or self.replace:
            if self.replace:
                output.append(yellow("R"))
            else:
                output.append(green("S"))
        else:
            output.append(" ")

        if self.fetch_restrict or self.fetch_restrict_satisfied:
            if self.fetch_restrict_satisfied:
                output.append(green("f"))
            else:
                output.append(red("F"))
        else:
            output.append(" ")

        if self.new_version:
            output.append(turquoise("U"))
        else:
            output.append(" ")

        if self.downgrade:
            output.append(blue("D"))
        else:
            output.append(" ")

        if self.mask is not None:
            output.append(self.mask)

        return "".join(output)
Example #36
	def __str__(self):
		output = []

		if self.interactive:
			output.append(colorize("WARN", "I"))
		else:
			output.append(" ")

		if self.new or self.force_reinstall:
			if self.force_reinstall:
				output.append(red("r"))
			else:
				output.append(green("N"))
		else:
			output.append(" ")

		if self.new_slot or self.replace:
			if self.replace:
				output.append(yellow("R"))
			else:
				output.append(green("S"))
		else:
			output.append(" ")

		if self.fetch_restrict or self.fetch_restrict_satisfied:
			if self.fetch_restrict_satisfied:
				output.append(green("f"))
			else:
				output.append(red("F"))
		else:
			output.append(" ")

		if self.new_version:
			output.append(turquoise("U"))
		else:
			output.append(" ")

		if self.downgrade:
			output.append(blue("D"))
		else:
			output.append(" ")

		if self.mask is not None:
			output.append(self.mask)

		return "".join(output)
Example #37
def keyword(string, stable=True, hard_masked=False):
    """Returns a keyword string."""
    if stable:
        return output.green(string)
    if hard_masked:
        return output.red(string)
    # keyword masked:
    return output.blue(string)
Example #38
 def check_cmd_exists(self, cmd):
     "return the path of the specified command if it exists"
     print(blue("Checking for %s..." % cmd))
     for path in os.environ['PATH'].split(os.path.pathsep):
         if os.path.exists(os.path.join(path, cmd)):
             print(blue("'%s'" % os.path.join(path, cmd)))
             return 1
     print(red("not found."))
     return 0
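The manual PATH walk in check_cmd_exists() can also be expressed with the standard library; this is a self-contained alternative sketch, not part of the pivy build code:

import shutil

def command_exists(cmd):
	"""Return True when `cmd` is found on PATH (same idea as check_cmd_exists)."""
	return shutil.which(cmd) is not None

print(command_exists("qmake"))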
Example #39
    def __init__(self, options=None, repoman_settings=None):
        if options.vcs:
            if options.vcs in ('cvs', 'svn', 'git', 'bzr', 'hg'):
                self.vcs = options.vcs
            else:
                self.vcs = None
        else:
            vcses = FindVCS()
            if len(vcses) > 1:
                print(
                    red('*** Ambiguous workdir -- more than one VCS found'
                        ' at the same depth: %s.' % ', '.join(vcses)))
                print(
                    red('*** Please either clean up your workdir'
                        ' or specify --vcs option.'))
                sys.exit(1)
            elif vcses:
                self.vcs = vcses[0]
            else:
                self.vcs = None

        if options.if_modified == "y" and self.vcs is None:
            logging.info("Not in a version controlled repository; "
                         "disabling --if-modified.")
            options.if_modified = "n"

        # Disable copyright/mtime check if vcs does not preserve mtime (bug #324075).
        self.vcs_preserves_mtime = self.vcs in ('cvs', )

        self.vcs_local_opts = repoman_settings.get("REPOMAN_VCS_LOCAL_OPTS",
                                                   "").split()
        self.vcs_global_opts = repoman_settings.get("REPOMAN_VCS_GLOBAL_OPTS")
        if self.vcs_global_opts is None:
            if self.vcs in ('cvs', 'svn'):
                self.vcs_global_opts = "-q"
            else:
                self.vcs_global_opts = ""
        self.vcs_global_opts = self.vcs_global_opts.split()

        if options.mode == 'commit' and not options.pretend and not self.vcs:
            logging.info("Not in a version controlled repository; "
                         "enabling pretend mode.")
            options.pretend = True
Example #40
 def __str__(self):
     s = self.name
     if self.enabled:
         s = red(s)
     else:
         s = "-" + s
         s = blue(s)
     if self.forced:
         s = "(%s)" % s
     return s
Example #41
def commit_check(repolevel, reposplit):
    # Check if it's in $PORTDIR/$CATEGORY/$PN, otherwise bail if committing.
    # Reason for this is if they're trying to commit in just $FILESDIR/*,
    # the Manifest needs updating.
    # This check ensures that repoman knows where it is,
    # and the manifest recommit is at least possible.
    if repolevel not in [1, 2, 3]:
        print(red("***") +
              (" Commit attempts *must* be from within a vcs checkout,"
               " category, or package directory."))
        print(red("***") +
              (" Attempting to commit from a packages files directory"
               " will be blocked for instance."))
        print(red("***") +
              (" This is intended behaviour,"
               " to ensure the manifest is recommitted for a package."))
        print(red("***"))
        err("Unable to identify level we're commiting from for %s" %
            '/'.join(reposplit))
Example #42
	def _get_repos(self, auto_sync_only=True, match_repos=None):
		msgs = []
		repos = self.emerge_config.target_config.settings.repositories
		if match_repos is not None:
			# Discard duplicate repository names or aliases.
			match_repos = set(match_repos)
			repos = self._match_repos(match_repos, repos)
			if len(repos) < len(match_repos):
				# Build a set of all the matched repos' names and aliases so we
				# can do a set difference for names that are missing.
				repo_names = set()
				for repo in repos:
					repo_names.add(repo.name)
					if repo.aliases is not None:
						repo_names.update(repo.aliases)
				missing = match_repos - repo_names
				if missing:
					msgs.append(red(" * ") + "The specified repo(s) were not found: %s" %
						(" ".join(repo_name for repo_name in missing)) + \
						"\n   ...returning")
					return (False, repos, msgs)

		if auto_sync_only:
			repos = self._filter_auto(repos)

		sync_disabled = [repo for repo in repos if repo.sync_type is None]
		if sync_disabled:
			repos = [repo for repo in repos if repo.sync_type is not None]
			if match_repos is not None:
				msgs.append(red(" * " ) + "The specified repo(s) have sync disabled: %s" %
					" ".join(repo.name for repo in sync_disabled) + \
					"\n   ...returning")
				return (False, repos, msgs)

		missing_sync_uri = [repo for repo in repos if repo.sync_uri is None]
		if missing_sync_uri:
			repos = [repo for repo in repos if repo.sync_uri is not None]
			msgs.append(red(" * ") + "The specified repo(s) are missing sync-uri: %s" %
				" ".join(repo.name for repo in missing_sync_uri) + \
				"\n   ...returning")
			return (False, repos, msgs)

		return (True, repos, msgs)
Example #43
def do_normal(pkg, verbose):
    data = []
    if not pkg[4]:
        installed = "[ Not Installed ]"
    else:
        installed = pkg[4]

    if pkg[2]:
        masked = red(" [ Masked ]")
    else:
        masked = ""

    data.append("%s  %s%s\n      %s %s\n      %s %s" % \
            (green("*"), bold(pkg[1]), masked,
            darkgreen("Latest version available:"), pkg[3],
            darkgreen("Latest version installed:"), installed))

    if verbose:
        mpv = best(portdb.xmatch("match-all", pkg[1]))
        iuse_split, final_use = get_flags(mpv, final_setting=True)
        iuse = ""
        use_list = []
        for ebuild_iuse in iuse_split:
            use = ebuild_iuse.lstrip('+-')
            if use in final_use:
                use_list.append(red("+" + use) + " ")
            else:
                use_list.append(blue("-" + use) + " ")
        use_list.sort()
        iuse = ' '.join(use_list)
        if iuse == "":
            iuse = "-"

        data.append("      %s         %s\n      %s       %s" % \
                (darkgreen("Unstable version:"), pkg_version(mpv),
                 darkgreen("Use Flags (stable):"), iuse))

    data.append("      %s %s\n      %s    %s\n      %s %s\n      %s     %s\n" % \
            (darkgreen("Size of downloaded files:"), pkg[5],
             darkgreen("Homepage:"), pkg[6],
             darkgreen("Description:"), pkg[7],
             darkgreen("License:"), pkg[8]))
    return data, False
Example #44
	def __str__(self):
		s = self.name
		if self.enabled:
			s = red(s)
		else:
			s = '-' + s
			s = blue(s)
		if self.forced:
			s = '(%s)' % s
		return s
Example #45
def do_normal(pkg, verbose):
    data = []
    if not pkg[4]:
        installed = "[ Not Installed ]"
    else:
        installed = pkg[4]

    if pkg[2]:
        masked = red(" [ Masked ]")
    else:
        masked = ""

    data.append("%s  %s%s\n      %s %s\n      %s %s" % \
            (green("*"), bold(pkg[1]), masked,
            darkgreen("Latest version available:"), pkg[3],
            darkgreen("Latest version installed:"), installed))

    if verbose:
        mpv = best(portdb.xmatch("match-all", pkg[1]))
        iuse_split, final_use = get_flags(mpv, final_setting=True)
        iuse = ""
        use_list = []
        for ebuild_iuse in iuse_split:
            use = ebuild_iuse.lstrip('+-')
            if use in final_use:
                use_list.append(red("+" + use) + " ")
            else:
                use_list.append(blue("-" + use) + " ")
        use_list.sort()
        iuse = ' '.join(use_list)
        if iuse == "":
            iuse = "-"

        data.append("      %s         %s\n      %s       %s" % \
                (darkgreen("Unstable version:"), pkg_version(mpv),
                 darkgreen("Use Flags (stable):"), iuse))

    data.append("      %s %s\n      %s    %s\n      %s %s\n      %s     %s\n" % \
            (darkgreen("Size of downloaded files:"), pkg[5],
             darkgreen("Homepage:"), pkg[6],
             darkgreen("Description:"), pkg[7],
             darkgreen("License:"), pkg[8]))
    return data
Example #46
 def __str__(self):
     s = self.name
     if self.enabled:
         s = red(s)
     else:
         s = '-' + s
         s = blue(s)
     if self.forced:
         s = '(%s)' % s
     return s
Example #47
def commit_check(repolevel, reposplit):
	# Check if it's in $PORTDIR/$CATEGORY/$PN, otherwise bail if committing.
	# Reason for this is if they're trying to commit in just $FILESDIR/*,
	# the Manifest needs updating.
	# This check ensures that repoman knows where it is,
	# and the manifest recommit is at least possible.
	if repolevel not in [1, 2, 3]:
		print(red("***") + (
			" Commit attempts *must* be from within a vcs checkout,"
			" category, or package directory."))
		print(red("***") + (
			" Attempting to commit from a packages files directory"
			" will be blocked for instance."))
		print(red("***") + (
			" This is intended behaviour,"
			" to ensure the manifest is recommitted for a package."))
		print(red("***"))
		err(
			"Unable to identify level we're commiting from for %s" %
			'/'.join(reposplit))
Example #48
	def _check_capable(self):
		if self.options.mode == "manifest":
			return
		self.binary = find_binary('xmllint')
		if not self.binary:
			print(red("!!! xmllint not found. Can't check metadata.xml.\n"))
		else:
			if not fetch_metadata_dtd(self.metadata_dtd, self.repoman_settings):
				sys.exit(1)
			# this can be problematic if xmllint changes their output
			self._is_capable = True
Example #49
    def total(self, mode, size, num_files, verb, action):
        """outputs the formatted totals to stdout

		@param mode: sets color and message. 1 of ['normal', 'deprecated']
		@param size: total space savings
		@param num_files: total number of files
		@param verb: string eg. 1 of ["would be", "has been"]
		@param action: string eg 1 of ['distfiles', 'packages']
		"""
        self.set_colors(mode)
        if mode == "normal":
            message="Total space from "+red(str(num_files))+" files "+\
             verb+" freed in the " + action + " directory"
            print(" ===========")
            print(self.prettySize(size, True, red), message)
        elif mode == "deprecated":
            message = "Total space from "+red(str(num_files))+" package files\n"+\
             "   Re-run the last command with the -D " +\
             "option to clean them as well"
            print(" ===========")
            print(self.prettySize(size, True, red), message)
Example #50
    def check_with_cmake(self):
        dirname = os.path.dirname(__file__)
        cmake_command = ['cmake', dirname]
        try:
            cmake_command += ['-G', os.environ['GENERATOR']]
        except KeyError:
            pass
        print(yellow('calling: ' + cmake_command[0] + ' ' + cmake_command[1]))
        cmake = subprocess.Popen(cmake_command, stdout=subprocess.PIPE)
        cmake_out, _ = cmake.communicate()
        coin_vars = [
            'COIN_FOUND', 'COIN_VERSION', 'COIN_INCLUDE_DIR', 'COIN_LIB_DIR'
        ]
        soqt_vars = [
            'SOQT_FOUND', 'SOQT_VERSION', 'SOQT_INCLUDE_DIR', 'SOQT_LIB_DIR'
        ]
        config_dict = {}
        if cmake.returncode == 0:
            for line in cmake_out.decode("utf-8").split("\n"):
                for var in coin_vars + soqt_vars:
                    if var in line:
                        line = (line.replace('-- ' + var,
                                             '').replace(': ',
                                                         '').replace('\n', ''))
                        config_dict[var] = line

        # Added overwrite of SOQT_INCLUDE_DIR, because cmake is identifying it incorrectly
        config_dict["SOQT_INCLUDE_DIR"] = "/usr/local/include"
        ##########

        print(yellow('\nchecking for COIN via cmake'))
        for key in coin_vars:
            if key in config_dict:
                print(blue(key + ': ' + config_dict[key]))

        print(yellow('\nchecking for SOQT via cmake'))
        for key in soqt_vars:
            if key in config_dict:
                print(blue(key + ': ' + config_dict[key]))

        if config_dict['SOQT_FOUND'] == 'false':
            pivy_build.MODULES.pop('soqt')
            print(red("\ndisable soqt, because cmake couldn't find it"))

        self.cmake_config_dict = config_dict
        if not bool(self.cmake_config_dict['COIN_FOUND']):
            raise (RuntimeError(
                'coin was not found, but you need coin to build pivy'))

        if not bool(self.cmake_config_dict['SOQT_FOUND']):
            raise (RuntimeError(
                'soqt was not found, but you need soqt to build pivy'))
Example #51
    def eprompt(self, message):
        """Display a user question depending on a color mode.

		@param message: text string to display

		@output to stdout
		"""
        if not self.options['nocolor']:
            prefix = " " + red('>') + " "
        else:
            prefix = "??? "
        sys.stdout.write(prefix + message)
        sys.stdout.flush()
Example #52
 def _check_capable(self):
     if self.options.mode == "manifest":
         return
     self.binary = find_binary('xmllint')
     if not self.binary:
         print(red("!!! xmllint not found. Can't check metadata.xml.\n"))
         self._is_capable = False
     elif not self._is_capable:
         if not fetch_metadata_dtd(self.metadata_dtd,
                                   self.repoman_settings):
             sys.exit(1)
         # this can be problematic if xmllint changes their output
         self._is_capable = True
Exemple #53
0
 def check_gui_bindings(self):
     "check for availability of SoGui bindings and remove the ones that are not available"
     if sys.platform == "win32":
         print("Coin and SoWin are built by default on Windows...")
         self.MODULES.pop('soxt', None)
         self.MODULES.pop('sogtk', None)
         print(blue("Checking for QTDIR environment variable..."), end=' ')
         if os.getenv("QTDIR"):
             print(blue(os.getenv("QTDIR")))
         else:
             self.MODULES.pop('soqt', None)
             print(red("not set. (SoQt bindings won't be built)"))
     else:
         for gui in self.SOGUI:
             if gui not in self.MODULES:
                 continue
             gui_config_cmd = self.MODULES[gui][1]
             if not self.check_cmd_exists(gui_config_cmd):
                 self.MODULES.pop(gui, None)
             else:
                 print(blue("Checking for %s version..." % gui), end=' ')
                 version = self.do_os_popen("%s --version" % gui_config_cmd)
                 print(blue("%s" % version))
Exemple #54
0
def parse_options():
	"""Parses the command line options an sets settings accordingly"""

	# TODO: Verify that the options no-ld-path, no-order and no-progress
	# are not applicable

	settings = DEFAULTS.copy()
	try:
		opts, args = getopt.getopt(sys.argv[1:], 
			'dehiklopqvCL:P', 
			['nocolor', 'debug', 'exact', 'help', 'ignore',
			'keep-temp', 'library=', 'no-ld-path', 'no-order',
			'pretend', 'no-pretend', 'no-progress', 'quiet', 'verbose'])

		do_help = False
		for key, val in opts:
			if key in ('-h', '--help'):
				do_help = True
			elif key in ('-q', '--quiet'):
				settings['quiet'] = True
				settings['VERBOSITY'] = 0
			elif key in ('-v', '--verbose'):
				settings['VERBOSITY'] = 2
			elif key in ('-d', '--debug'):
				settings['debug'] = True
				settings['VERBOSITY'] = 3
			elif key in ('-p', '--pretend'):
				settings['PRETEND'] = True
			elif key == '--no-pretend':
				settings['NO_PRETEND'] = True
			elif key in ('-e', '--exact'):
				settings['EXACT'] = True
			elif key in ('-C', '--nocolor', '--no-color'):
				settings['nocolor'] = True
			elif key in ('-L', '--library', '--library='):
				settings['library'] = settings['library'].union(val.split(','))
			elif key in ('-i', '--ignore'):
				settings['USE_TMP_FILES'] = False

		settings['pass_through_options'] = " " + " ".join(args)
	except getopt.GetoptError:
		#logging.info(red('Unrecognized option\n'))
		print(red('Unrecognized option\n'))
		print_usage()
		sys.exit(2)
	if do_help:
		print_usage()
		sys.exit(0)
	return settings
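A minimal call sketch for parse_options (DEFAULTS, print_usage and the exact option
semantics come from the surrounding module and are only assumed here):

    settings = parse_options()
    if settings['VERBOSITY'] >= 2:
        print('libraries to check:', ', '.join(sorted(settings['library'])))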
Exemple #55
0
    def swig_generate(self):
        "build all available modules"

        quote = lambda s : '"' + s + '"'
        for module in self.MODULES:
            module_name = self.MODULES[module][0]
            config_cmd = self.MODULES[module][1]
            module_pkg_name = self.MODULES[module][2]
            mod_out_prefix = module_pkg_name.replace('.', os.sep) + module
            
            if sys.platform == "win32":
                INCLUDE_DIR = os.path.join(os.getenv("COINDIR"), "include")
                CPP_FLAGS = "-I" + quote(INCLUDE_DIR) + " " + \
                            "-I" + quote(os.path.join(os.getenv("COINDIR"), "include", "Inventor", "annex")) + \
                            " /DCOIN_DLL /wd4244 /wd4049"
                # acquire the highest non-debug Coin library version
                LDFLAGS_LIBS = quote(max(glob.glob(os.path.join(os.getenv("COINDIR"), "lib", "coin?.lib")))) + " "
                if module == "sowin":
                    CPP_FLAGS += " /DSOWIN_DLL"
                    LDFLAGS_LIBS += quote(os.path.join(os.getenv("COINDIR"), "lib", "sowin1.lib"))
                elif module == "soqt":
                    CPP_FLAGS += " -I" + '"' + os.getenv("QTDIR") + "\\include\"  /DSOQT_DLL"
                    CPP_FLAGS += " -I" + '"' + os.getenv("QTDIR") + "\\include\Qt\""
                    LDFLAGS_LIBS += os.path.join(os.getenv("COINDIR"), "lib", "soqt1.lib") + " "
            else:
                INCLUDE_DIR = self.do_os_popen("coin-config --includedir")
                if module_name != 'coin':
                    mod_include_dir = self.do_os_popen("%s --includedir" % config_cmd)
                    if mod_include_dir != INCLUDE_DIR:
                        INCLUDE_DIR += '\" -I\"%s' % mod_include_dir
                CPP_FLAGS = self.do_os_popen("%s --cppflags" % config_cmd)
                LDFLAGS_LIBS = self.do_os_popen("%s --ldflags --libs" % config_cmd)

            if not os.path.isfile(mod_out_prefix + "_wrap.cpp"):
                print red("\n=== Generating %s_wrap.cpp for %s ===\n" %
                          (mod_out_prefix, module))
                print blue(self.SWIG + " " + self.SWIG_SUPPRESS_WARNINGS + " " + self.SWIG_PARAMS %
                           (INCLUDE_DIR,
                            self.CXX_INCS,
                            mod_out_prefix, module))
                if os.system(self.SWIG + " " + self.SWIG_SUPPRESS_WARNINGS + " " + self.SWIG_PARAMS %
                             (INCLUDE_DIR,
                              self.CXX_INCS,
                              mod_out_prefix, module)):
                    print red("SWIG did not generate wrappers successfully! ** Aborting **")
                    sys.exit(1)
            else:
                print red("=== %s_wrap.cpp for %s already exists! ===" % (mod_out_prefix, module_pkg_name + module))

            self.ext_modules.append(Extension(module_name, [mod_out_prefix + "_wrap.cpp"],
                                              extra_compile_args=(self.CXX_INCS + CPP_FLAGS).split(),
                                              extra_link_args=(self.CXX_LIBS + LDFLAGS_LIBS).split()))
            self.py_modules.append(module_pkg_name + module)
Exemple #56
0
def do_compact(pkg):
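    # pkg is a plain tuple here; judging from the format string at the bottom,
    # [1] looks like the package atom, [2] a mask flag, [3] the available version,
    # [4] the installed version and [7] a description (an inference, not documented).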
    prefix0 = " "
    prefix1 = " "

    if pkg[3] == pkg[4]:
        color = darkgreen
        prefix1 = "I"
    elif not pkg[4]:
        color = darkgreen
        prefix1 = "N"
    else:
        color = turquoise
        prefix1 = "U"

    if pkg[2]:
        prefix0 = "M"

    return " [%s%s] %s (%s):  %s" % \
            (red(prefix0), color(prefix1), bold(pkg[1]), color(pkg[3]), pkg[7])
Exemple #57
0
	def set_pkg_info(self, pkg, ordered):
		"""Sets various pkg_info dictionary variables

		@param pkg: _emerge.Package.Package instance
		@param ordered: bool
		@rtype pkg_info dictionary
		Modifies self.counters.restrict_fetch,
			self.counters.restrict_fetch_satisfied
		"""
		pkg_info = PkgInfo()
		pkg_info.ordered = ordered
		pkg_info.fetch_symbol = " "
		pkg_info.operation = pkg.operation
		pkg_info.merge = ordered and pkg_info.operation == "merge"
		if not pkg_info.merge and pkg_info.operation == "merge":
			pkg_info.operation = "nomerge"
		pkg_info.built = pkg.type_name != "ebuild"
		pkg_info.ebuild_path = None
		pkg_info.repo_name = pkg.repo
		if pkg.type_name == "ebuild":
			pkg_info.ebuild_path = self.portdb.findname(
				pkg.cpv, myrepo=pkg_info.repo_name)
			if pkg_info.ebuild_path is None:
				raise AssertionError(
					"ebuild not found for '%s'" % pkg.cpv)
			pkg_info.repo_path_real = os.path.dirname(os.path.dirname(
				os.path.dirname(pkg_info.ebuild_path)))
		else:
			pkg_info.repo_path_real = \
				self.portdb.getRepositoryPath(pkg.metadata["repository"])
		pkg_info.use = list(self.conf.pkg_use_enabled(pkg))
		if not pkg.built and pkg.operation == 'merge' and \
			'fetch' in pkg.metadata.restrict:
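			# RESTRICT=fetch: mark the package with a red "F"; it is downgraded to a
			# green "f" below when there is nothing left for this package to fetch.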
			pkg_info.fetch_symbol = red("F")
			if pkg_info.ordered:
				self.counters.restrict_fetch += 1
			if not self.portdb.getfetchsizes(pkg.cpv,
				useflags=pkg_info.use, myrepo=pkg.repo):
				pkg_info.fetch_symbol = green("f")
				if pkg_info.ordered:
					self.counters.restrict_fetch_satisfied += 1
		return pkg_info