Example #1
    def check_gui_bindings(self):
        '''check for availability of SoGui bindings and remove the ones that are not available'''

        print(yellow('\ncheck_gui_bindings is not supported in this version'))
        print(yellow('soqt is built by default'))
        print(yellow('make sure you have installed the soqt library + headers\n'))
        return #TODO

        if sys.platform == "win32":
            self.MODULES.pop('soxt', None)
            self.MODULES.pop('sogtk', None)
            print(blue("Checking for SoWin..."))
            if not os.path.exists(os.path.join(os.getenv("COINDIR"), "include", "Inventor", "Win", "SoWin.h")):
                self.MODULES.pop('sowin', None)
                print(red("COINDIR\\include\\Inventor\\Win\\SoWin.h not found. (SoWin bindings won't be built)"))
            print(blue("Checking for QTDIR environment variable..."))
            if os.getenv("QTDIR"):
                print(blue(os.getenv("QTDIR")))
            else:
                self.MODULES.pop('soqt', None)
                print(red("not set. (SoQt bindings won't be built)"))
        else:
            for gui in self.SOGUI:
                if gui not in self.MODULES:
                    continue
                gui_config_cmd = self.MODULES[gui][1]
                if not self.check_cmd_exists(gui_config_cmd):
                    self.MODULES.pop(gui, None)
                else:
                    print(blue("Checking for %s version..." % gui))
                    version = self.do_os_popen("%s --version" % gui_config_cmd)
                    print(blue("%s" % version))
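All of these examples lean on the same small set of ANSI color helpers (yellow, blue, red, green, bold, colorize). In the Gentoo/portage-related snippets those helpers typically come from portage.output; the sketch below shows that common pattern. It is only an illustration: the import path is an assumption about the snippets' environment, and the warn() helper and sample strings are made up for the example.

# Minimal sketch (assumption): color helpers as provided by portage.output.
from portage.output import blue, bold, colorize, green, red, yellow

def warn(msg):
    # Most of the examples prefix warnings with a yellow asterisk.
    print(yellow(" * ") + msg)

warn("config file needs updating " + bold("(illustrative)"))
print(colorize("WARN", "IMPORTANT:"), "highlighted text")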
Example #2
def chk_updated_cfg_files(eroot, config_protect):
    target_root = eroot
    result = list(
        portage.util.find_updated_config_files(target_root, config_protect))

    for x in result:
        writemsg_level("\n %s " % (colorize("WARN", "* " + _("IMPORTANT:"))),
                       level=logging.INFO,
                       noiselevel=-1)
        if not x[1]:  # it's a protected file
            writemsg_level(_("config file '%s' needs updating.\n") % x[0],
                           level=logging.INFO,
                           noiselevel=-1)
        else:  # it's a protected dir
            if len(x[1]) == 1:
                head, tail = os.path.split(x[1][0])
                tail = tail[len("._cfg0000_"):]
                fpath = os.path.join(head, tail)
                writemsg_level(_("config file '%s' needs updating.\n") % fpath,
                               level=logging.INFO,
                               noiselevel=-1)
            else:
                writemsg_level(
                 _("%d config files in '%s' need updating.\n") % \
                 (len(x[1]), x[0]), level=logging.INFO, noiselevel=-1)

    if result:
        print(" " + yellow("*") + " See the " +
              colorize("INFORM", _("CONFIGURATION FILES")) + " " +
              _("section of the") + " " + bold("emerge"))
        print(" " + yellow("*") + " " +
              _("man page to learn how to update config files."))
Example #3
def get_best_match(cpv, cp, logger):
	"""Tries to find another version of the pkg with the same slot
	as the deprecated installed version.  Failing that, it attempts to get any
	version of the same app.

	@param cpv: string
	@param cp: string
	@rtype tuple: ([cpv,...], SLOT)
	"""

	slot = portage.db[portage.root]["vartree"].dbapi.aux_get(cpv, ["SLOT"])[0]
	logger.warning('\t%s "%s" %s.' % (yellow('* Warning:'), cpv,bold('ebuild not found.')))
	logger.debug('\tget_best_match(); Looking for %s:%s' %(cp, slot))
	try:
		match = portdb.match('%s:%s' %(cp, slot))
	except portage.exception.InvalidAtom:
		match = None

	if not match:
		logger.warning('\t' + red('!!') + ' ' + yellow(
			'Could not find ebuild for %s:%s' %(cp, slot)))
		slot = ['']
		match = portdb.match(cp)
		if not match:
			logger.warning('\t' + red('!!') + ' ' +
				yellow('Could not find ebuild for ' + cp))
	return match, slot
Example #4
def chk_updated_cfg_files(eroot, config_protect):
	target_root = eroot
	result = list(
		portage.util.find_updated_config_files(target_root, config_protect))

	for x in result:
		writemsg_level("\n %s " % (colorize("WARN", "* " + _("IMPORTANT:"))),
			level=logging.INFO, noiselevel=-1)
		if not x[1]: # it's a protected file
			writemsg_level( _("config file '%s' needs updating.\n") % x[0],
				level=logging.INFO, noiselevel=-1)
		else: # it's a protected dir
			if len(x[1]) == 1:
				head, tail = os.path.split(x[1][0])
				tail = tail[len("._cfg0000_"):]
				fpath = os.path.join(head, tail)
				writemsg_level(_("config file '%s' needs updating.\n") % fpath,
					level=logging.INFO, noiselevel=-1)
			else:
				writemsg_level(
					_("%d config files in '%s' need updating.\n") % \
					(len(x[1]), x[0]), level=logging.INFO, noiselevel=-1)

	if result:
		print(" " + yellow("*") + " See the " +
			colorize("INFORM", _("CONFIGURATION FILES")) + " and " +
			colorize("INFORM", _("CONFIGURATION FILES UPDATE TOOLS")))
		print(" " + yellow("*") + " sections of the " + bold("emerge") + " " +
			_("man page to learn how to update config files."))
Example #5
    def check_with_cmake(self):
        dirname = os.path.dirname(__file__)
        cmake_command = ['cmake', dirname]
        try:
            cmake_command += ['-G', os.environ['GENERATOR']]
        except KeyError:
            pass
        print(yellow('calling: ' + cmake_command[0] + ' ' + cmake_command[1]))
        cmake = subprocess.Popen(cmake_command,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE)
        cmake_out, cmake_err = cmake.communicate()
        coin_vars = [
            'COIN_FOUND', 'COIN_VERSION', 'COIN_INCLUDE_DIR', 'COIN_LIB_DIR'
        ]
        soqt_vars = [
            'SOQT_FOUND', 'SOQT_VERSION', 'SOQT_INCLUDE_DIR', 'SOQT_LIB_DIR'
        ]
        config_dict = {}
        if cmake.returncode == 0:
            for line in cmake_out.decode("utf-8").split("\n"):
                for var in coin_vars + soqt_vars:
                    if var in line:
                        line = (line.replace('-- ' + var,
                                             '').replace(': ',
                                                         '').replace('\n', ''))
                        config_dict[var] = line

        print(yellow('\nchecking for COIN via cmake'))
        for key in coin_vars:
            if key in config_dict:
                print(blue(key + ': ' + config_dict[key]))

        print(yellow('\nchecking for SOQT via cmake'))
        for key in soqt_vars:
            if key in config_dict:
                print(blue(key + ': ' + config_dict[key]))

        if config_dict.get('SOQT_FOUND', 'false') == 'false':
            pivy_build.MODULES.pop('soqt')
            print(red("\ndisable soqt, because cmake couldn't find it"))
        else:
            try:
                import qtinfo
                self.QTINFO = qtinfo.QtInfo()
            except Exception as e:
                import traceback
                print(
                    red("\ndisable soqt, because there was a problem running qtinfo (needs qmake)"
                        ))
                print(red("-" * 60))
                print(red(traceback.format_exc()))
                print(red("-" * 60))
                pivy_build.MODULES.pop('soqt')

        self.cmake_config_dict = config_dict
        if self.cmake_config_dict.get('COIN_FOUND', 'false') == 'false':
            raise (RuntimeError(
                'coin was not found, but you need coin to build pivy'))
Example #6
 def check_coin_version(self):
     "check the Coin version"
     if sys.platform == "win32": return
     if not self.check_cmd_exists("coin-config"):
         sys.exit(1)
     print blue("Coin version..."),
     version = self.do_os_popen("coin-config --version")
     print blue("%s" % version)
     if not version.startswith('3'):
         print yellow("** Warning: Pivy has only been tested with Coin "
                      "versions Coin-dev 3.")
Example #7
    def check_with_cmake(self):
        dirname = os.path.dirname(__file__)
        cmake_command = ['cmake', dirname]
        try:
            cmake_command += ['-G', os.environ['GENERATOR']]
        except KeyError:
            pass
        print(yellow('calling: ' + cmake_command[0] + ' ' + cmake_command[1]))
        cmake = subprocess.Popen(cmake_command, stdout=subprocess.PIPE)
        cmake_out, _ = cmake.communicate()
        coin_vars = [
            'COIN_FOUND', 'COIN_VERSION', 'COIN_INCLUDE_DIR', 'COIN_LIB_DIR'
        ]
        soqt_vars = [
            'SOQT_FOUND', 'SOQT_VERSION', 'SOQT_INCLUDE_DIR', 'SOQT_LIB_DIR'
        ]
        config_dict = {}
        if cmake.returncode == 0:
            for line in cmake_out.decode("utf-8").split("\n"):
                for var in coin_vars + soqt_vars:
                    if var in line:
                        line = (line.replace('-- ' + var,
                                             '').replace(': ',
                                                         '').replace('\n', ''))
                        config_dict[var] = line

        # Added overwrite of SOQT_INCLUDE_DIR, because cmake is identifying it incorrectly
        config_dict["SOQT_INCLUDE_DIR"] = "/usr/local/include"
        ##########

        print(yellow('\nchecking for COIN via cmake'))
        for key in coin_vars:
            if key in config_dict:
                print(blue(key + ': ' + config_dict[key]))

        print(yellow('\nchecking for SOQT via cmake'))
        for key in soqt_vars:
            if key in config_dict:
                print(blue(key + ': ' + config_dict[key]))

        if config_dict['SOQT_FOUND'] == 'false':
            pivy_build.MODULES.pop('soqt')
            print(red("\ndisable soqt, because cmake couldn't find it"))

        self.cmake_config_dict = config_dict
        if not bool(self.cmake_config_dict['COIN_FOUND']):
            raise (RuntimeError(
                'coin was not found, but you need coin to build pivy'))

        if not bool(self.cmake_config_dict['SOQT_FOUND']):
            raise (RuntimeError(
                'soqt was not found, but you need soqt to build pivy'))
Example #8
    def check_simvoleon_version(self):
        "return if SIMVoleon is available and check the version"
        if sys.platform == "win32" or not self.check_cmd_exists("simvoleon-config"):
            self.MODULES.pop('simvoleon', None)
            return False

        print blue("SIMVoleon version..."),
        version = self.do_os_popen("simvoleon-config --version")
        print blue("%s" % version)
        if not version.startswith('2.0'):
            print yellow("** Warning: Pivy has only been tested with SIMVoleon "
                         "versions 2.0.x.")
        return True
Example #9
 def check_swig_version(self, swig):
     "check for the swig version"
     global SWIG_VERSION
     if not self.check_cmd_exists(swig):
         sys.exit(1)
     print blue("Checking for SWIG version..."),
     p = subprocess.Popen("%s -version" % swig, 
                          shell=True, stdout=subprocess.PIPE)
     version = p.stdout.readlines()[1].strip().split(" ")[2]
     p.stdout.close()
     print blue("%s" % version)
     SWIG_VERSION = version
     if not version in self.SUPPORTED_SWIG_VERSIONS:
         print yellow("Warning: Pivy has only been tested with the following " + \
                      "SWIG versions: %s." % " ".join(self.SUPPORTED_SWIG_VERSIONS))
Example #10
    def get_coin_features(self):
        '''
        set the global variable SWIG_COND_SYMBOLS needed for conditional wrapping
        '''
        print(yellow('\ncoin-features are not supported in this version'))
        return  #TODO

        if sys.platform == "win32":
            return
        print(blue("Checking for Coin features..."))
        if not os.system("coin-config --have-feature 3ds_import"):
            self.SWIG_COND_SYMBOLS.append("-DHAVE_FEATURE_3DS_IMPORT")
            print(green("3ds import "))

        if not os.system("coin-config --have-feature vrml97"):
            self.SWIG_COND_SYMBOLS.append("-DHAVE_FEATURE_VRML97")
            print(green("vrml97 "))

        if not os.system("coin-config --have-feature sound"):
            self.SWIG_COND_SYMBOLS.append("-DHAVE_FEATURE_SOUND")
            print(green("sound "))

        if not os.system("coin-config --have-feature superglu"):
            self.SWIG_COND_SYMBOLS.append("-DHAVE_FEATURE_SUPERGLUE")
            print(green("superglu "))

        if not os.system("coin-config --have-feature threads"):
            self.SWIG_COND_SYMBOLS.append("-DHAVE_FEATURE_THREADS")
            print(green("threads "))

        if not os.system("coin-config --have-feature threadsafe"):
            self.SWIG_COND_SYMBOLS.append("-DHAVE_FEATURE_THREADSAFE")
            print(green("threadsafe "))

        print()
Example #11
def rebuild(logger, assigned, settings):
	"""rebuilds the assigned pkgs"""

	args = settings['pass_through_options']
	if settings['EXACT']:
		emerge_command = '=' + ' ='.join(assigned)
	else:
		emerge_command = ' '.join(get_slotted_cps(assigned, logger))
	if settings['PRETEND']:
		args += ' --pretend'
	if settings['VERBOSITY'] >= 2:
		args += ' --verbose'
	elif settings['VERBOSITY'] < 1:
		args += ' --quiet'
	if settings['nocolor']:
		args += ' --color n'

	if len(emerge_command) == 0:
		logger.warn(bold('\nThere is nothing to emerge. Exiting.'))
		return 0

	emerge_command = emerge_command

	logger.warn(yellow(
		'\nemerge') + args +
		' --oneshot --complete-graph=y ' +
		bold(emerge_command))

	success = os.system(
		'emerge ' + args +
		' --oneshot --complete-graph=y ' +
		emerge_command)
	return success
Example #12
	def __init__(self, scanned_files, logger, searchlibs=None, searchbits=None,
				all_masks=None, masked_dirs=None):
		'''LibCheck init function.

		@param scanned_files: optional dictionary of the type created by
				scan_files().  Defaults to the class instance of scanned_files
		@param logger: python style Logging function to use for output.
		@param searchlibs: optional set() of libraries to search for. If defined
				it toggles several settings to configure this class for
				a target search rather than a broken libs search.
		'''
		self.scanned_files = scanned_files
		self.logger = logger
		self.searchlibs = searchlibs
		self.searchbits = sorted(searchbits) or ['32', '64']
		self.all_masks = all_masks
		self.masked_dirs = masked_dirs
		self.logger.debug("\tLibCheck.__init__(), new searchlibs: %s" %(self.searchbits))
		if searchlibs:
			self.smsg = '\tLibCheck.search(), Checking for %s bit dependants'
			self.pmsg = yellow(" * ") + 'Files that depend on: %s (%s bits)'
			self.setlibs = self._setslibs
			self.check = self._checkforlib
		else:
			self.smsg = '\tLibCheck.search(), Checking for broken %s bit libs'
			self.pmsg = green(' * ') + bold('Broken files that require:') + ' %s (%s bits)'
			self.setlibs = self._setlibs
			self.check = self._checkbroken
		self.sfmsg = "\tLibCheck.search(); Total found: %(count)d libs, %(deps)d files in %(time)d milliseconds"
		self.alllibs = None
Example #13
    def check_simvoleon_version(self):
        '''return if SIMVoleon is available and check the version'''
        print(yellow('\ncheck_simvoleon_version is not supported in this version'))
        return #TODO

        if sys.platform == "win32" or not self.check_cmd_exists("simvoleon-config"):
            self.MODULES.pop('simvoleon', None)
            return False

        print(blue("SIMVoleon version..."))
        version = self.do_os_popen("simvoleon-config --version")
        print(blue("%s" % version))
        if not version.startswith('2.0'):
            print(yellow("** Warning: Pivy has only been tested with SIMVoleon "
                         "versions 2.0.x."))
        return True
Example #14
def rebuild(logger, assigned, settings):
    """rebuilds the assigned pkgs"""

    args = list(settings["pass_through_options"])
    if settings["EXACT"]:
        _assigned = filter_masked(assigned, logger)
        emerge_command = ["=" + a for a in _assigned]
    else:
        _assigned = get_slotted_cps(assigned, logger)
        emerge_command = [a for a in _assigned]
    if settings["PRETEND"]:
        args.append("--pretend")
    if settings["VERBOSITY"] >= 2:
        args.append("--verbose")
    elif settings["VERBOSITY"] < 1:
        args.append("--quiet")
    if settings["nocolor"]:
        args.extend(["--color", "n"])

    if len(emerge_command) == 0:
        logger.warning(bold("\nThere is nothing to emerge. Exiting."))
        return 0

    logger.warning(
        yellow("\nemerge") + " " + " ".join(args) +
        " --oneshot --complete-graph=y " + bold(" ".join(emerge_command)))

    stime = current_milli_time()
    _args = ["emerge"] + args + ["--oneshot", "--complete-graph=y"
                                 ] + emerge_command
    success = subprocess.call(_args)
    ftime = current_milli_time()
    logger.debug("\trebuild(); emerge call for %d ebuilds took: %s seconds" %
                 (len(_assigned), str((ftime - stime) / 1000.0)))
    return success
Example #15
def rebuild(logger, assigned, settings):
    """rebuilds the assigned pkgs"""

    args = settings['pass_through_options']
    if settings['EXACT']:
        _assigned = filter_masked(assigned, logger)
        emerge_command = '=' + ' ='.join(_assigned)
    else:
        _assigned = get_slotted_cps(assigned, logger)
        emerge_command = ' '.join(_assigned)
    if settings['PRETEND']:
        args += ' --pretend'
    if settings['VERBOSITY'] >= 2:
        args += ' --verbose'
    elif settings['VERBOSITY'] < 1:
        args += ' --quiet'
    if settings['nocolor']:
        args += ' --color n'

    if len(emerge_command) == 0:
        logger.warning(bold('\nThere is nothing to emerge. Exiting.'))
        return 0

    logger.warning(
        yellow('\nemerge') + args + ' --oneshot --complete-graph=y ' +
        bold(emerge_command))

    stime = current_milli_time()
    _args = 'emerge ' + args + ' --oneshot --complete-graph=y ' + emerge_command
    _args = _args.split()
    success = subprocess.call(_args)
    ftime = current_milli_time()
    logger.debug("\trebuild(); emerge call for %d ebuilds took: %s seconds" %
                 (len(_assigned), str((ftime - stime) / 1000.0)))
    return success
Example #16
 def check_swig_version(self, swig):
     "check for the swig version"
     global SWIG_VERSION
     if not self.check_cmd_exists(swig):
         # on some systems there is only a swig3.0 so check for this and
         # set SWIG to "swig3.0"
         swig = "swig3.0"
         if not self.check_cmd_exists(swig):
             sys.exit(1)
         else:
             self.SWIG = swig
     print(blue("Checking for SWIG version..."))
     p = subprocess.Popen("%s -version" % swig,
                          shell=True,
                          stdout=subprocess.PIPE)
     version = str(p.stdout.readlines()[1].strip()).split(" ")[2]
     if version[-1] == "'":
         version = version[:-1]
     p.stdout.close()
     print(blue("%s" % version))
     SWIG_VERSION = version
     if version not in self.SUPPORTED_SWIG_VERSIONS:
         print(
             yellow(
                 "Warning: Pivy has only been tested with the following " +
                 "SWIG versions: %s." %
                 " ".join(self.SUPPORTED_SWIG_VERSIONS)))
Example #17
def rebuild(logger, assigned, settings):
	"""rebuilds the assigned pkgs"""
	
	args = settings['pass_through_options']
	if settings['EXACT']:
		emerge_command = '=' + ' ='.join(assigned)
	else:
		emerge_command = ' '.join(get_slotted_cps(assigned, logger))
	if settings['PRETEND']:
		args += ' --pretend'
	if settings['VERBOSITY'] >= 2:
		args += ' --verbose'
	elif settings['VERBOSITY'] < 1:
		args += ' --quiet'
	if settings['nocolor']:
		args += ' --color n'

	if len(emerge_command) == 0:
		logger.warn(bold('\nThere is nothing to emerge. Exiting.'))
		return 0

	emerge_command = emerge_command

	logger.warn(yellow(
		'\nemerge') + args + 
		' --oneshot --complete-graph=y ' +
		bold(emerge_command))
	
	success = os.system(
		'emerge ' + args + 
		' --oneshot --complete-graph=y ' + 
		emerge_command)
	return success
Example #18
    def check_coin_version(self):
        '''
        check the Coin version
        '''
        print(yellow('\ncheck_coin_version is not supported in this version'))
        print(yellow('coin-bindings are built by default'))
        print(yellow('checks have been disabled because of missing config files'))
        print(yellow('make sure you have installed the coin library + headers!'))
        return #TODO

        if sys.platform == "win32":
            return
        if not self.check_cmd_exists("coin-config"):
            sys.exit(1)
        print(blue("Coin version..."))
        version = self.do_os_popen("coin-config --version")
        print(blue("%s" % version))
Example #19
	def chk_updated_cfg_files(self, eroot, config_protect):
		target_root = eroot
		result = list(portage.util.find_updated_config_files(target_root, config_protect))

		print("DEBUG: scanning /etc for config files....")

		for x in result:
			print("\n"+colorize("WARN", " * IMPORTANT:"), end=' ')
			if not x[1]: # it's a protected file
				print("config file '%s' needs updating." % x[0])
			else: # it's a protected dir
				print("%d config files in '%s' need updating." % (len(x[1]), x[0]))
	
		if result:
			print(" "+yellow("*")+" See the "+colorize("INFORM","CONFIGURATION FILES")\
					+ " section of the " + bold("emerge"))
			print(" "+yellow("*")+" man page to learn how to update config files.")
Example #20
    def _validate_rsync_opts(self, rsync_opts, syncuri):
        # The below validation is not needed when using the above hardcoded
        # defaults.

        portage.writemsg(
            "Using PORTAGE_RSYNC_OPTS instead of hardcoded defaults\n", 1)
        rsync_opts.extend(
            portage.util.shlex_split(
                self.settings.get("PORTAGE_RSYNC_OPTS", "")))
        for opt in ("--recursive", "--times"):
            if opt not in rsync_opts:
                portage.writemsg(
                    yellow("WARNING:") + " adding required option " +
                    "%s not included in PORTAGE_RSYNC_OPTS\n" % opt)
                rsync_opts.append(opt)

        for exclude in ("distfiles", "local", "packages"):
            opt = "--exclude=/%s" % exclude
            if opt not in rsync_opts:
                portage.writemsg(
                    yellow("WARNING:") +
                    " adding required option %s not included in " % opt +
                    "PORTAGE_RSYNC_OPTS (can be overridden with --exclude='!')\n"
                )
                rsync_opts.append(opt)

        if syncuri.rstrip("/").endswith(".gentoo.org/gentoo-portage"):

            def rsync_opt_startswith(opt_prefix):
                for x in rsync_opts:
                    if x.startswith(opt_prefix):
                        return (1, False)
                return (0, False)

            if not rsync_opt_startswith("--timeout="):
                rsync_opts.append("--timeout=%d" % self.timeout)

            for opt in ("--compress", "--whole-file"):
                if opt not in rsync_opts:
                    portage.writemsg(
                        yellow("WARNING:") + " adding required option " +
                        "%s not included in PORTAGE_RSYNC_OPTS\n" % opt)
                    rsync_opts.append(opt)
        return rsync_opts
Example #21
    def _validate_rsync_opts(self, rsync_opts, syncuri):
        # The below validation is not needed when using the above hardcoded
        # defaults.

        portage.writemsg("Using PORTAGE_RSYNC_OPTS instead of hardcoded defaults\n", 1)
        rsync_opts.extend(portage.util.shlex_split(self.settings.get("PORTAGE_RSYNC_OPTS", "")))
        for opt in ("--recursive", "--times"):
            if opt not in rsync_opts:
                portage.writemsg(
                    yellow("WARNING:") + " adding required option " + "%s not included in PORTAGE_RSYNC_OPTS\n" % opt
                )
                rsync_opts.append(opt)

        for exclude in ("distfiles", "local", "packages"):
            opt = "--exclude=/%s" % exclude
            if opt not in rsync_opts:
                portage.writemsg(
                    yellow("WARNING:")
                    + " adding required option %s not included in " % opt
                    + "PORTAGE_RSYNC_OPTS (can be overridden with --exclude='!')\n"
                )
                rsync_opts.append(opt)

        if syncuri.rstrip("/").endswith(".gentoo.org/gentoo-portage"):

            def rsync_opt_startswith(opt_prefix):
                for x in rsync_opts:
                    if x.startswith(opt_prefix):
                        return (1, False)
                return (0, False)

            if not rsync_opt_startswith("--timeout="):
                rsync_opts.append("--timeout=%d" % self.timeout)

            for opt in ("--compress", "--whole-file"):
                if opt not in rsync_opts:
                    portage.writemsg(
                        yellow("WARNING:")
                        + " adding required option "
                        + "%s not included in PORTAGE_RSYNC_OPTS\n" % opt
                    )
                    rsync_opts.append(opt)
        return rsync_opts
Example #22
 def check_coin_version(self):
     "check the Coin version"
     if sys.platform == "win32": return
     if not self.check_cmd_exists("coin-config"):
         sys.exit(1)
     print(blue("Coin version..."))
     version = self.do_os_popen("coin-config --version")
     print(blue("%s" % version))
     if not version.startswith('3'):
         print(
             yellow("** Warning: Pivy has only been tested with Coin "
                    "versions Coin-dev 3."))
Example #23
def extract_dependencies_from_la(la, libraries, to_check, logger):
    broken = []

    libnames = []
    for lib in libraries:
        match = re.match(r".+\/(.+)\.(so|la|a)(\..+)?", lib)
        if match is not None:
            libname = match.group(1)
            if libname not in libnames:
                libnames += [
                    libname,
                ]

    for _file in la:
        if not os.path.exists(_file):
            continue

        for line in open(
                _unicode_encode(_file, encoding=_encodings["fs"]),
                mode="r",
                encoding=_encodings["content"],
        ).readlines():
            line = line.strip()
            if line.startswith("dependency_libs="):
                match = re.match(r"dependency_libs='([^']+)'", line)
                if match is not None:
                    for el in match.group(1).split(" "):
                        el = el.strip()
                        if len(el) < 1 or el.startswith("-L") or el.startswith(
                                "-R"):
                            continue

                        if el.startswith("-l") and "lib" + el[2:] in libnames:
                            pass
                        elif el in la or el in libraries:
                            pass
                        else:
                            if to_check:
                                _break = False
                                for tc in to_check:
                                    if tc in el:
                                        _break = True
                                        break
                                if not _break:
                                    continue

                            logger.info("\t" + yellow(" * ") + _file +
                                        " is broken (requires: " + bold(el) +
                                        ")")
                            broken.append(_file)
    return broken
Example #24
	def _get_installed_best(self, pkg, pkg_info):
		""" we need to use "--emptrytree" testing here rather than
		"empty" param testing because "empty"
		param is used for -u, where you still *do* want to see when
		something is being upgraded.

		@param pkg: _emerge.Package.Package instance
		@param pkg_info: dictionary
		@rtype addl, myoldbest: list, myinslotlist: list
		Modifies self.counters.reinst, self.counters.binary, self.counters.new

		"""
		myoldbest = []
		myinslotlist = None
		installed_versions = self.vardb.match_pkgs(pkg.cp)
		if self.vardb.cpv_exists(pkg.cpv):
			addl = "  "+yellow("R")+pkg_info.fetch_symbol+"  "
			installed_version = self.vardb.match_pkgs(pkg.cpv)[0]
			if not self.quiet_repo_display and installed_version.repo != pkg.repo:
				myoldbest = [installed_version]
			if pkg_info.ordered:
				if pkg_info.merge:
					self.counters.reinst += 1
					if pkg.type_name == "binary":
						self.counters.binary += 1
				elif pkg_info.operation == "uninstall":
					self.counters.uninst += 1
		# filter out old-style virtual matches
		elif installed_versions and \
			installed_versions[0].cp == pkg.cp:
			myinslotlist = self.vardb.match_pkgs(pkg.slot_atom)
			# If this is the first install of a new-style virtual, we
			# need to filter out old-style virtual matches.
			if myinslotlist and \
				myinslotlist[0].cp != pkg.cp:
				myinslotlist = None
			if myinslotlist:
				myoldbest = myinslotlist[:]
				addl = self._insert_slot(pkg, pkg_info, myinslotlist)
			else:
				myoldbest = installed_versions
				addl = self._new_slot(pkg, pkg_info)
			if self.conf.changelog:
				self.do_changelog(pkg, pkg_info)
		else:
			addl = " " + green("N") + " " + pkg_info.fetch_symbol + "  "
			if pkg_info.ordered:
				self.counters.new += 1
				if pkg.type_name == "binary":
					self.counters.binary += 1
		return addl, myoldbest, myinslotlist
Example #25
    def _get_installed_best(self, pkg, pkg_info):
        """ we need to use "--emptrytree" testing here rather than
		"empty" param testing because "empty"
		param is used for -u, where you still *do* want to see when
		something is being upgraded.

		@param pkg: _emerge.Package.Package instance
		@param pkg_info: dictionary
		@rtype addl, myoldbest: list, myinslotlist: list
		Modifies self.counters.reinst, self.counters.binary, self.counters.new

		"""
        myoldbest = []
        myinslotlist = None
        installed_versions = self.vardb.match_pkgs(pkg.cp)
        if self.vardb.cpv_exists(pkg.cpv):
            addl = "  " + yellow("R") + pkg_info.fetch_symbol + "  "
            installed_version = self.vardb.match_pkgs(pkg.cpv)[0]
            if not self.quiet_repo_display and installed_version.repo != pkg.repo:
                myoldbest = [installed_version]
            if pkg_info.ordered:
                if pkg_info.merge:
                    self.counters.reinst += 1
                    if pkg.type_name == "binary":
                        self.counters.binary += 1
                elif pkg_info.operation == "uninstall":
                    self.counters.uninst += 1
        # filter out old-style virtual matches
        elif installed_versions and \
         installed_versions[0].cp == pkg.cp:
            myinslotlist = self.vardb.match_pkgs(pkg.slot_atom)
            # If this is the first install of a new-style virtual, we
            # need to filter out old-style virtual matches.
            if myinslotlist and \
             myinslotlist[0].cp != pkg.cp:
                myinslotlist = None
            if myinslotlist:
                myoldbest = myinslotlist[:]
                addl = self._insert_slot(pkg, pkg_info, myinslotlist)
            else:
                myoldbest = installed_versions
                addl = self._new_slot(pkg, pkg_info)
            if self.conf.changelog:
                self.do_changelog(pkg, pkg_info)
        else:
            addl = " " + green("N") + " " + pkg_info.fetch_symbol + "  "
            if pkg_info.ordered:
                self.counters.new += 1
                if pkg.type_name == "binary":
                    self.counters.binary += 1
        return addl, myoldbest, myinslotlist
Example #26
def assign_packages(broken, logger, settings):
	''' Finds and returns packages that own files listed in broken.
		broken is a list of files.
	'''
	assigned = set()
	for group in os.listdir(settings['PKG_DIR']):
		if group in IGNORED:
			continue
		elif os.path.isfile(settings['PKG_DIR'] + group):
			if not group.startswith('.keep_'):
				logger.warn(yellow(" * Invalid category found in the installed pkg db: ") +
					bold(settings['PKG_DIR'] + group))
			continue
		for pkg in os.listdir(settings['PKG_DIR'] + group):
			if '-MERGING-' in pkg:
				logger.warn(yellow(" * Invalid/incomplete package merge found in the installed pkg db: ") +
						bold(settings['PKG_DIR'] + pkg))
				continue
			_file = settings['PKG_DIR'] + group + '/' + pkg + '/CONTENTS'
			if os.path.exists(_file):
				try:
					with open(_file, 'r') as cnt:
						for line in cnt:
							matches = re.match('^obj (/[^ ]+)', line)
							if matches is not None:
								match = matches.group(1)
								if match in broken:
									found = group+'/'+pkg
									if found not in assigned:
										assigned.add(found)
									logger.info('\t' + match + ' -> '
										+ bold(found))
				except Exception as ex:
					logger.warn(red(' !! Failed to read ' + _file) +
						" Original exception was:\n" + str(ex))

	return assigned
Example #27
def extract_dependencies_from_la(la, libraries, to_check, logger):
    broken = []

    libnames = []
    for lib in libraries:
        match = re.match(r'.+\/(.+)\.(so|la|a)(\..+)?', lib)
        if match is not None:
            libname = match.group(1)
            if libname not in libnames:
                libnames += [
                    libname,
                ]

    for _file in la:
        if not os.path.exists(_file):
            continue

        for line in open(_unicode_encode(_file, encoding=_encodings['fs']),
                         mode='r',
                         encoding=_encodings['content']).readlines():
            line = line.strip()
            if line.startswith('dependency_libs='):
                match = re.match("dependency_libs='([^']+)'", line)
                if match is not None:
                    for el in match.group(1).split(' '):
                        el = el.strip()
                        if (len(el) < 1 or el.startswith('-L')
                                or el.startswith('-R')):
                            continue

                        if el.startswith('-l') and 'lib' + el[2:] in libnames:
                            pass
                        elif el in la or el in libraries:
                            pass
                        else:
                            if to_check:
                                _break = False
                                for tc in to_check:
                                    if tc in el:
                                        _break = True
                                        break
                                if not _break:
                                    continue

                            logger.info('\t' + yellow(' * ') + _file +
                                        ' is broken (requires: ' + bold(el) +
                                        ')')
                            broken.append(_file)
    return broken
Example #28
def collect_binaries_from_dir(dirs, mask, logger):
    ''' Collects all binaries from the specified list of directories.
		mask is a list of paths omitted from scanning;
		it can be either a single file or an entire directory.
		Returns a list of binaries.
	'''

    # contains list of directories found
    # allows us to reduce number of fnc calls
    found_directories = []
    found_files = []

    for _dir in dirs:
        if _dir in mask:
            continue

        try:
            for listing in os.listdir(_dir):
                listing = os.path.join(_dir, listing)
                if listing in mask:
                    continue

                if os.path.isdir(listing):
                    if os.path.islink(listing):
                        # we do not want to scan symlink directories
                        pass
                    else:
                        found_directories.append(listing)
                elif os.path.isfile(listing):
                    # we're looking for binaries
                    # and with binaries we do not need links
                    # thus we can optimize a bit
                    if not os.path.islink(listing):
                        prv = os.stat(listing)[stat.ST_MODE]
                        if prv & stat.S_IXUSR == stat.S_IXUSR or \
                          prv & stat.S_IXGRP == stat.S_IXGRP or \
                          prv & stat.S_IXOTH == stat.S_IXOTH:
                            found_files.append(listing)
        except Exception as ex:
            logger.debug(
                yellow('Exception during binaries collecting: ' +
                       blue('%s') % str(ex)))

    if found_directories:
        found_files += collect_binaries_from_dir(found_directories, mask,
                                                 logger)

    return found_files
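A hypothetical call to collect_binaries_from_dir() as defined above; the directory list, the masked path, and the logger name are illustrative assumptions rather than values taken from the source.

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("revdep-rebuild-example")  # hypothetical name

# Scan two common binary directories, skipping one masked path (illustrative).
executables = collect_binaries_from_dir(["/usr/bin", "/usr/sbin"],
                                        mask=["/usr/sbin/unused"],
                                        logger=logger)
logger.info("%d executable files found", len(executables))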
Example #29
def masking(mask):
	"""Returns a 'masked by' string."""
	if 'package.mask' in mask or 'profile' in mask:
		# use porthole wrap style to help clarify meaning
		return output.red("M["+mask[0]+"]")
	if mask:
		for status in mask:
			if 'keyword' in status:
				# keyword masked | " [missing keyword] " <=looks better
				return output.blue("["+status+"]")
			if status in archlist:
				return output.green(status)
			if 'unknown' in status:
				return output.yellow(status)
		return output.red(status)
	return ''
Example #30
def main_checks(found_libs, broken_list, dependencies, logger):
    ''' Checks for broken dependencies.
		found_libs have to be the same as returned by prepare_checks
		broken_list is list of libraries found by scanelf
		dependencies is the value returned by prepare_checks
	'''

    broken_pathes = []

    for broken in broken_list:
        found = found_libs[broken]
        logger.info('Broken files that requires: ' + bold(found))
        for dep_path in dependencies[broken]:
            logger.info(yellow(' * ') + dep_path)
            broken_pathes.append(dep_path)
    return broken_pathes
Example #31
def main_checks(found_libs, broken_list, dependencies, logger):
    """ Checks for broken dependencies.
		found_libs have to be the same as returned by prepare_checks
		broken_list is list of libraries found by scanelf
		dependencies is the value returned by prepare_checks
	"""

    broken_pathes = []

    for broken in broken_list:
        found = found_libs[broken]
        logger.info("Broken files that requires: " + bold(found))
        for dep_path in dependencies[broken]:
            logger.info(yellow(" * ") + dep_path)
            broken_pathes.append(dep_path)
    return broken_pathes
Example #32
def masking(mask):
    """Returns a 'masked by' string."""
    if "package.mask" in mask or "profile" in mask:
        # use porthole wrap style to help clarify meaning
        return output.red("M[" + mask[0] + "]")
    if mask:
        for status in mask:
            if "keyword" in status:
                # keyword masked | " [missing keyword] " <=looks better
                return output.blue("[" + status + "]")
            if status in archlist:
                return output.green(status)
            if "unknown" in status:
                return output.yellow(status)
        return output.red(status)
    return ""
Example #33
def collect_binaries_from_dir(dirs, mask, logger):
	''' Collects all binaries from the specified list of directories.
		mask is a list of paths omitted from scanning;
		it can be either a single file or an entire directory.
		Returns a list of binaries.
	'''

	# contains list of directories found
	# allows us to reduce number of fnc calls
	found_directories = []  
	found_files = []

	for _dir in dirs:
		if _dir in mask:
			continue

		try:
			for listing in os.listdir(_dir):
				listing = os.path.join(_dir, listing)
				if listing in mask:
					continue

				if os.path.isdir(listing):
					if os.path.islink(listing):
						# we do not want to scan symlink directories
						pass
					else:
						found_directories.append(listing)
				elif os.path.isfile(listing):
					# we're looking for binaries
					# and with binaries we do not need links
					# thus we can optimize a bit
					if not os.path.islink(listing):
						prv = os.stat(listing)[stat.ST_MODE]
						if prv & stat.S_IXUSR == stat.S_IXUSR or \
								prv & stat.S_IXGRP == stat.S_IXGRP or \
								prv & stat.S_IXOTH == stat.S_IXOTH:
							found_files.append(listing)
		except Exception as ex:
			logger.debug(
				yellow('Exception during binaries collecting: '+
				blue('%s') %str(ex)))

	if found_directories:
		found_files += collect_binaries_from_dir(found_directories, mask, logger)

	return found_files
Example #34
    def __str__(self):
        output = []

        if self.interactive:
            output.append(colorize("WARN", "I"))
        else:
            output.append(" ")

        if self.new or self.force_reinstall:
            if self.force_reinstall:
                output.append(red("r"))
            else:
                output.append(green("N"))
        else:
            output.append(" ")

        if self.new_slot or self.replace:
            if self.replace:
                output.append(yellow("R"))
            else:
                output.append(green("S"))
        else:
            output.append(" ")

        if self.fetch_restrict or self.fetch_restrict_satisfied:
            if self.fetch_restrict_satisfied:
                output.append(green("f"))
            else:
                output.append(red("F"))
        else:
            output.append(" ")

        if self.new_version:
            output.append(turquoise("U"))
        else:
            output.append(" ")

        if self.downgrade:
            output.append(blue("D"))
        else:
            output.append(" ")

        if self.mask is not None:
            output.append(self.mask)

        return "".join(output)
Example #35
def extract_dependencies_from_la(la, libraries, to_check, logger):
	broken = []

	libnames = []
	for lib in libraries:
		match = re.match(r'.+\/(.+)\.(so|la|a)(\..+)?', lib)
		if match is not None:
			libname = match.group(1)
			if libname not in libnames:
				libnames += [libname, ]

	for _file in la:
		if not os.path.exists(_file):
			continue

		for line in open(_unicode_encode(_file, encoding=_encodings['fs']), mode='r',
			encoding=_encodings['content']).readlines():
			line = line.strip()
			if line.startswith('dependency_libs='):
				match = re.match("dependency_libs='([^']+)'", line)
				if match is not None:
					for el in match.group(1).split(' '):
						el = el.strip()
						if (len(el) < 1 or el.startswith('-L')
							or el.startswith('-R')
							):
							continue

						if el.startswith('-l') and 'lib'+el[2:] in libnames:
							pass
						elif el in la or el in libraries:
							pass
						else:
							if to_check:
								_break = False
								for tc in to_check:
									if tc in el:
										_break = True
										break
								if not _break:
									continue

							logger.info('\t' + yellow(' * ') + _file +
								' is broken (requires: ' + bold(el)+')')
							broken.append(_file)
	return broken
Example #36
	def __str__(self):
		output = []

		if self.interactive:
			output.append(colorize("WARN", "I"))
		else:
			output.append(" ")

		if self.new or self.force_reinstall:
			if self.force_reinstall:
				output.append(red("r"))
			else:
				output.append(green("N"))
		else:
			output.append(" ")

		if self.new_slot or self.replace:
			if self.replace:
				output.append(yellow("R"))
			else:
				output.append(green("S"))
		else:
			output.append(" ")

		if self.fetch_restrict or self.fetch_restrict_satisfied:
			if self.fetch_restrict_satisfied:
				output.append(green("f"))
			else:
				output.append(red("F"))
		else:
			output.append(" ")

		if self.new_version:
			output.append(turquoise("U"))
		else:
			output.append(" ")

		if self.downgrade:
			output.append(blue("D"))
		else:
			output.append(" ")

		if self.mask is not None:
			output.append(self.mask)

		return "".join(output)
Example #37
def parse_conf(conf_file, visited=None, logger=None):
    """Parses supplied conf_file for libraries pathes.
    conf_file is file or files to parse
    visited is set of files already parsed
    """
    lib_dirs = set()
    to_parse = set()

    if isinstance(conf_file, str):
        conf_file = [conf_file]

    for conf in conf_file:
        try:
            with open(
                    _unicode_encode(conf, encoding=_encodings["fs"]),
                    encoding=_encodings["content"],
            ) as _file:
                for line in _file.readlines():
                    line = line.strip()
                    if line.startswith("#"):
                        continue
                    elif line.startswith("include"):
                        include_line = line.split()[1:]
                        for included in include_line:
                            if not included.startswith("/"):
                                path = os.path.join(os.path.dirname(conf),
                                                    included)
                            else:
                                path = included

                            to_parse.update(glob.glob(path))
                    else:
                        lib_dirs.add(line)
        except EnvironmentError:
            logger.warn("\t" + yellow("Error when parsing file %s" % conf))

    if visited is None:
        visited = set()

    visited.update(conf_file)
    to_parse = to_parse.difference(visited)
    if to_parse:
        lib_dirs.update(parse_conf(to_parse, visited, logger=logger))

    return lib_dirs
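A hypothetical usage sketch for parse_conf() as defined above, pointing it at an ld.so.conf-style file; the path and logger name are illustrative assumptions.

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("parse-conf-example")  # hypothetical name

# /etc/ld.so.conf usually just includes ld.so.conf.d/*.conf, so this also
# exercises the recursive handling of 'include' lines.
lib_dirs = parse_conf("/etc/ld.so.conf", logger=logger)
for directory in sorted(lib_dirs):
    print(directory)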
Example #38
def parse_conf(conf_file, visited=None, logger=None):
	''' Parses supplied conf_file for library paths.
		conf_file is file or files to parse
		visited is set of files already parsed
	'''
	lib_dirs = set()
	to_parse = set()

	if isinstance(conf_file, _basestring):
		conf_file = [conf_file]

	for conf in conf_file:
		try:
			with open(_unicode_encode(conf, encoding=_encodings['fs']),
					encoding=_encodings['content']) as _file:
				for line in _file.readlines():
					line = line.strip()
					if line.startswith('#'):
						continue
					elif line.startswith('include'):
						include_line = line.split()[1:]
						for included in include_line:
							if not included.startswith('/'):
								path = os.path.join(os.path.dirname(conf), \
													included)
							else:
								path = included

							to_parse.update(glob.glob(path))
					else:
						lib_dirs.add(line)
		except EnvironmentError:
			logger.warn('\t' + yellow('Error when parsing file %s' %conf))

	if visited is None:
		visited = set()

	visited.update(conf_file)
	to_parse = to_parse.difference(visited)
	if to_parse:
		lib_dirs.update(parse_conf(to_parse, visited, logger=logger))

	return lib_dirs
Example #39
def parse_conf(conf_file, visited=None, logger=None):
    ''' Parses supplied conf_file for library paths.
		conf_file is file or files to parse
		visited is set of files already parsed
	'''
    lib_dirs = set()
    to_parse = set()

    if isinstance(conf_file, str):
        conf_file = [conf_file]

    for conf in conf_file:
        try:
            with open(conf) as _file:
                for line in _file:
                    line = line.strip()
                    if line.startswith('#'):
                        continue
                    elif line.startswith('include'):
                        include_line = line.split()[1:]
                        for included in include_line:
                            if not included.startswith('/'):
                                path = os.path.join(os.path.dirname(conf), \
                                     included)
                            else:
                                path = included

                            to_parse.update(glob.glob(path))
                    else:
                        lib_dirs.add(line)
        except EnvironmentError:
            logger.warn(yellow('Error when parsing file %s' % conf))

    if visited is None:
        visited = set()

    visited.update(conf_file)
    to_parse.difference_update(visited)
    if to_parse:
        lib_dirs.update(parse_conf(to_parse, visited, logger=logger))

    return lib_dirs
Example #40
def extract_dependencies_from_la(la, libraries, to_check, logger):
    broken = []

    libnames = []
    for lib in libraries:
        match = re.match(r".+\/(.+)\.(so|la|a)(\..+)?", lib)
        if match is not None:
            libname = match.group(1)
            if libname not in libnames:
                libnames += [libname]

    for _file in la:
        if not os.path.exists(_file):
            continue

        for line in open(_file, "r").readlines():
            line = line.strip()
            if line.startswith("dependency_libs="):
                match = re.match("dependency_libs='([^']+)'", line)
                if match is not None:
                    for el in match.group(1).split(" "):
                        el = el.strip()
                        if len(el) < 1 or el.startswith("-L") or el.startswith("-R"):
                            continue

                        if el.startswith("-l") and "lib" + el[2:] in libnames:
                            pass
                        elif el in la or el in libraries:
                            pass
                        else:
                            if to_check:
                                _break = False
                                for tc in to_check:
                                    if tc in el:
                                        _break = True
                                        break
                                if not _break:
                                    continue

                            logger.info(yellow(" * ") + _file + " is broken (requires: " + bold(el) + ")")
                            broken.append(_file)
    return broken
Example #41
	def process_results(self, found_libs, scanned_files=None):
		'''Processes the search results, logs the files found

		@param found_libs: dictionary of the type returned by search()
		@param scanned_files: optional dictionary of the type created by
				scan_files().  Defaults to the class instance of scanned_files
		@returns: list of filepaths from the search results.
		'''
		stime = current_milli_time()
		if not scanned_files:
			scanned_files = self.scanned_files
		found_pathes = []
		for bits, found in found_libs.items():
			for lib, files in found.items():
				self.logger.info(self.pmsg  % (bold(lib), bits))
				for fp in sorted(files):
					self.logger.info('\t' +yellow('* ') + fp)
					found_pathes.append(fp)
		ftime = current_milli_time()
		self.logger.debug("\tLibCheck.process_results(); total filepaths found: "
			"%d in %d milliseconds" % (len(found_pathes), ftime-stime))
		return found_pathes
Example #42
    def __init__(
        self,
        scanned_files,
        logger,
        searchlibs=None,
        searchbits=None,
        all_masks=None,
        masked_dirs=None,
    ):
        """LibCheck init function.

        @param scanned_files: optional dictionary of the type created by
                        scan_files().  Defaults to the class instance of scanned_files
        @param logger: python style Logging function to use for output.
        @param searchlibs: optional set() of libraries to search for. If defined
                        it toggles several settings to configure this class for
                        a target search rather than a broken libs search.
        """
        self.scanned_files = scanned_files
        self.logger = logger
        self.searchlibs = searchlibs
        self.searchbits = sorted(searchbits) or ["32", "64"]
        self.all_masks = all_masks
        self.masked_dirs = masked_dirs
        self.logger.debug("\tLibCheck.__init__(), new searchlibs: %s" %
                          (self.searchbits))
        if searchlibs:
            self.smsg = "\tLibCheck.search(), Checking for %s bit dependants"
            self.pmsg = yellow(" * ") + "Files that depend on: %s (%s bits)"
            self.setlibs = self._setslibs
            self.check = self._checkforlib
        else:
            self.smsg = "\tLibCheck.search(), Checking for broken %s bit libs"
            self.pmsg = (green(" * ") + bold("Broken files that require:") +
                         " %s (%s bits)")
            self.setlibs = self._setlibs
            self.check = self._checkbroken
        self.sfmsg = "\tLibCheck.search(); Total found: %(count)d libs, %(deps)d files in %(time)d milliseconds"
        self.alllibs = None
Example #43
    def process_results(self, found_libs, scanned_files=None):
        '''Processes the search results, logs the files found

		@param found_libs: dictionary of the type returned by search()
		@param scanned_files: optional dictionary of the type created by
				scan_files().  Defaults to the class instance of scanned_files
		@returns: list of filepaths from the search results.
		'''
        stime = current_milli_time()
        if not scanned_files:
            scanned_files = self.scanned_files
        found_pathes = []
        for bits, found in found_libs.items():
            for lib, files in found.items():
                self.logger.info(self.pmsg % (bold(lib), bits))
                for fp in sorted(files):
                    self.logger.info('\t' + yellow('* ') + fp)
                    found_pathes.append(fp)
        ftime = current_milli_time()
        self.logger.debug(
            "\tLibCheck.process_results(); total filepaths found: "
            "%d in %d milliseconds" % (len(found_pathes), ftime - stime))
        return found_pathes
Example #44
def rebuild(logger, assigned, settings):
	"""rebuilds the assigned pkgs"""

	args = settings['pass_through_options']
	if settings['EXACT']:
		_assigned = filter_masked(assigned, logger)
		emerge_command = '=' + ' ='.join(_assigned)
	else:
		_assigned = get_slotted_cps(assigned, logger)
		emerge_command = ' '.join(_assigned)
	if settings['PRETEND']:
		args += ' --pretend'
	if settings['VERBOSITY'] >= 2:
		args += ' --verbose'
	elif settings['VERBOSITY'] < 1:
		args += ' --quiet'
	if settings['nocolor']:
		args += ' --color n'

	if len(emerge_command) == 0:
		logger.warning(bold('\nThere is nothing to emerge. Exiting.'))
		return 0

	logger.warning(yellow(
		'\nemerge') + args +
		' --oneshot --complete-graph=y ' +
		bold(emerge_command))

	stime = current_milli_time()
	_args = 'emerge ' + args + ' --oneshot --complete-graph=y ' + emerge_command
	_args = _args.split()
	success = subprocess.call(_args)
	ftime = current_milli_time()
	logger.debug("\trebuild(); emerge call for %d ebuilds took: %s seconds"
		% (len(_assigned), str((ftime-stime)/1000.0)))
	return success
Example #45
def _create_use_string(conf, name, cur_iuse, iuse_forced, cur_use,
	old_iuse, old_use,
	is_new, feature_flags, reinst_flags):

	if not conf.print_use_string:
		return ""

	enabled = []
	if conf.alphabetical:
		disabled = enabled
		removed = enabled
	else:
		disabled = []
		removed = []
	cur_iuse = set(cur_iuse)
	enabled_flags = cur_iuse.intersection(cur_use)
	removed_iuse = set(old_iuse).difference(cur_iuse)
	any_iuse = cur_iuse.union(old_iuse)
	any_iuse = list(any_iuse)
	any_iuse.sort()

	for flag in any_iuse:
		flag_str = None
		isEnabled = False
		reinst_flag = reinst_flags and flag in reinst_flags
		if flag in enabled_flags:
			isEnabled = True
			if is_new or flag in old_use and \
				(conf.all_flags or reinst_flag):
				flag_str = red(flag)
			elif flag not in old_iuse:
				flag_str = yellow(flag) + "%*"
			elif flag not in old_use:
				flag_str = green(flag) + "*"
		elif flag in removed_iuse:
			if conf.all_flags or reinst_flag:
				flag_str = yellow("-" + flag) + "%"
				if flag in old_use:
					flag_str += "*"
				flag_str = "(" + flag_str + ")"
				removed.append(flag_str)
			continue
		else:
			if is_new or flag in old_iuse and \
				flag not in old_use and \
				(conf.all_flags or reinst_flag):
				flag_str = blue("-" + flag)
			elif flag not in old_iuse:
				flag_str = yellow("-" + flag)
				if flag not in iuse_forced:
					flag_str += "%"
			elif flag in old_use:
				flag_str = green("-" + flag) + "*"
		if flag_str:
			if flag in feature_flags:
				flag_str = "{" + flag_str + "}"
			elif flag in iuse_forced:
				flag_str = "(" + flag_str + ")"
			if isEnabled:
				enabled.append(flag_str)
			else:
				disabled.append(flag_str)

	if conf.alphabetical:
		ret = " ".join(enabled)
	else:
		ret = " ".join(enabled + disabled + removed)
	if ret:
		ret = '%s="%s" ' % (name, ret)
	return ret
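A worked, hedged sketch of the flag-colouring rules above. It assumes the colour helpers (red, green, blue, yellow) are importable from portage.output, as the snippet implies, and uses a stand-in conf object; the expected result is reconstructed from the branches in the function, not from a real emerge run.

# Sketch only: exercise _create_use_string() with minimal fake inputs.
from types import SimpleNamespace
from portage.output import blue, green, red, yellow  # assumed colour helpers

conf = SimpleNamespace(print_use_string=True, alphabetical=False, all_flags=True)
use_str = _create_use_string(
    conf, "USE",
    cur_iuse=["gtk", "qt5"], iuse_forced=[], cur_use=["gtk"],
    old_iuse=["gtk"], old_use=["gtk"],
    is_new=False, feature_flags=[], reinst_flags=[])
# use_str should resemble: USE="gtk -qt5%"  (colour codes omitted here);
# qt5 is newly added to IUSE and disabled, hence the yellow "-qt5" plus "%".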
Exemple #46
0
def main(settings=None, logger=None):
	"""Main program operation method....

	@param settings: dict.  defaults to settings.DEFAULTS
	@param logger: python logging module defaults to init_logger(settings)
	@return boolean  success/failure
	"""

	if settings is None:
		print("NO Input settings, using defaults...")
		settings = DEFAULTS.copy()

	if logger is None:
		logger = init_logger(settings)

	_libs_to_check = settings['library']

	if not settings['stdout'].isatty() or settings['nocolor']:
		nocolor()

	#TODO: Development warning
	logger.warn(blue(' * ') +
		yellow('This is a development version, '
			'so it may not work correctly'))
	logger.warn(blue(' * ') +
		yellow('The original revdep-rebuild script is '
			'installed as revdep-rebuild.sh'))

	if os.getuid() != 0 and not settings['PRETEND']:
		logger.warn(blue(' * ') +
			yellow('You are not root, adding --pretend to portage options'))
		settings['PRETEND'] = True

	if settings['library']:
		logger.warn(green(' * ') +
			"Looking for libraries: %s" % (bold(', '.join(settings['library']))))

	if settings['USE_TMP_FILES'] \
			and check_temp_files(settings['DEFAULT_TMP_DIR'], logger=logger):
		libraries, la_libraries, libraries_links, binaries = read_cache(
			settings['DEFAULT_TMP_DIR'])
		assigned = analyse(
			settings=settings,
			logger=logger,
			libraries=libraries,
			la_libraries=la_libraries,
			libraries_links=libraries_links,
			binaries=binaries,
			_libs_to_check=_libs_to_check)
	else:
		assigned = analyse(settings, logger, _libs_to_check=_libs_to_check)

	if not assigned:
		logger.warn('\n' + bold('Your system is consistent'))
		# return the correct exit code
		return 0

	has_masked = False
	tmp = []
	for ebuild in assigned:
		if get_masking_status(ebuild):
			has_masked = True
			logger.warn('!!! ' + red('All ebuilds that could satisfy: ') +
				green(ebuild) + red(' have been masked'))
		else:
			tmp.append(ebuild)
	assigned = tmp

	if has_masked:
		logger.info(red(' * ') +
			'Unmask all ebuild(s) listed above and call revdep-rebuild '
			'again or manually emerge given packages.')

	success = rebuild(logger, assigned, settings)
	logger.debug("rebuild return code = %i" %success)
	return success
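A hedged sketch of how this main() might be wired up as a console entry point. DEFAULTS, init_logger and main are assumed to come from the surrounding revdep-rebuild module, exactly as referenced in the snippet.

# Sketch only: force a dry run using the module's own defaults.
if __name__ == '__main__':
    import sys
    my_settings = DEFAULTS.copy()
    my_settings['PRETEND'] = True   # keep it to a --pretend run
    sys.exit(main(settings=my_settings))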
Exemple #48
0
def globaloption(string):
	"""Returns a global option string, i.e. the program global options."""
	return output.yellow(string)
Exemple #49
0
def _create_use_string(
    conf, name, cur_iuse, iuse_forced, cur_use, old_iuse, old_use, is_new, feature_flags, reinst_flags
):

    if not conf.print_use_string:
        return ""

    enabled = []
    if conf.alphabetical:
        disabled = enabled
        removed = enabled
    else:
        disabled = []
        removed = []
    cur_iuse = set(cur_iuse)
    enabled_flags = cur_iuse.intersection(cur_use)
    removed_iuse = set(old_iuse).difference(cur_iuse)
    any_iuse = cur_iuse.union(old_iuse)
    any_iuse = list(any_iuse)
    any_iuse.sort()

    for flag in any_iuse:
        flag_str = None
        isEnabled = False
        reinst_flag = reinst_flags and flag in reinst_flags
        if flag in enabled_flags:
            isEnabled = True
            if is_new or flag in old_use and (conf.all_flags or reinst_flag):
                flag_str = red(flag)
            elif flag not in old_iuse:
                flag_str = yellow(flag) + "%*"
            elif flag not in old_use:
                flag_str = green(flag) + "*"
        elif flag in removed_iuse:
            if conf.all_flags or reinst_flag:
                flag_str = yellow("-" + flag) + "%"
                if flag in old_use:
                    flag_str += "*"
                flag_str = "(" + flag_str + ")"
                removed.append(flag_str)
            continue
        else:
            if is_new or flag in old_iuse and flag not in old_use and (conf.all_flags or reinst_flag):
                flag_str = blue("-" + flag)
            elif flag not in old_iuse:
                flag_str = yellow("-" + flag)
                if flag not in iuse_forced:
                    flag_str += "%"
            elif flag in old_use:
                flag_str = green("-" + flag) + "*"
        if flag_str:
            if flag in feature_flags:
                flag_str = "{" + flag_str + "}"
            elif flag in iuse_forced:
                flag_str = "(" + flag_str + ")"
            if isEnabled:
                enabled.append(flag_str)
            else:
                disabled.append(flag_str)

    if conf.alphabetical:
        ret = " ".join(enabled)
    else:
        ret = " ".join(enabled + disabled + removed)
    if ret:
        ret = '%s="%s" ' % (name, ret)
    return ret
Exemple #50
0
def printUsage(_error=None, help=None):
	"""Print help message. May also print partial help to stderr if an
	error from {'options','actions'} is specified."""

	out = sys.stdout
	if _error:
		out = sys.stderr
	if _error not in ('actions', 'global-options', \
			'packages-options', 'distfiles-options', \
			'merged-packages-options', 'merged-distfiles-options', \
			'time', 'size'):
		_error = None
	if not _error and not help: help = 'all'
	if _error == 'time':
		print( pp.error("Wrong time specification"), file=out)
		print( "Time specification should be an integer followed by a"+
				" single letter unit.", file=out)
		print( "Available units are: y (years), m (months), w (weeks), "+
				"d (days) and h (hours).", file=out)
		print( "For instance: \"1y\" is \"one year\", \"2w\" is \"two"+
				" weeks\", etc. ", file=out)
		return
	if _error == 'size':
		print( pp.error("Wrong size specification"), file=out)
		print( "Size specification should be an integer followed by a"+
				" single letter unit.", file=out)
		print( "Available units are: G, M, K and B.", file=out)
		print("For instance: \"10M\" is \"ten megabytes\", \"200K\" "+
				"is \"two hundreds kilobytes\", etc.", file=out)
		return
	if _error in ('global-options', 'packages-options', 'distfiles-options', \
			'merged-packages-options', 'merged-distfiles-options',):
		print( pp.error("Wrong option on command line."), file=out)
		print( file=out)
	elif _error == 'actions':
		print( pp.error("Wrong or missing action name on command line."), file=out)
		print( file=out)
	print( white("Usage:"), file=out)
	if _error in ('actions','global-options', 'packages-options', \
			'distfiles-options') or help == 'all':
		print( " "+turquoise(__productname__),
			yellow("[global-option] ..."),
			green("<action>"),
			yellow("[action-option] ..."), file=out)
	if _error == 'merged-distfiles-options' or help in ('all','distfiles'):
		print( " "+turquoise(__productname__+'-dist'),
			yellow("[global-option, distfiles-option] ..."), file=out)
	if _error == 'merged-packages-options' or help in ('all','packages'):
		print( " "+turquoise(__productname__+'-pkg'),
			yellow("[global-option, packages-option] ..."), file=out)
	if _error in ('global-options', 'actions'):
		print( " "+turquoise(__productname__),
			yellow("[--help, --version]"), file=out)
	if help == 'all':
		print( " "+turquoise(__productname__+"(-dist,-pkg)"),
			yellow("[--help, --version]"), file=out)
	if _error == 'merged-packages-options' or help == 'packages':
		print( " "+turquoise(__productname__+'-pkg'),
			yellow("[--help, --version]"), file=out)
	if _error == 'merged-distfiles-options' or help == 'distfiles':
		print( " "+turquoise(__productname__+'-dist'),
			yellow("[--help, --version]"), file=out)
	print(file=out)
	if _error in ('global-options', 'merged-packages-options', \
	'merged-distfiles-options') or help:
		print( "Available global", yellow("options")+":", file=out)
		print( yellow(" -C, --nocolor")+
			"             - turn off colors on output", file=out)
		print( yellow(" -d, --deep")+
			"                - only keep the minimum for a reinstallation", file=out)
		print( yellow(" -e, --exclude-file=<path>")+
			" - path to the exclusion file", file=out)
		print( yellow(" -i, --interactive")+
			"         - ask confirmation before deletions", file=out)
		print( yellow(" -n, --package-names")+
			"       - protect all versions (when --deep)", file=out)
		print( yellow(" -p, --pretend")+
			"             - only display what would be cleaned", file=out)
		print( yellow(" -q, --quiet")+
			"               - be as quiet as possible", file=out)
		print( yellow(" -t, --time-limit=<time>")+
			"   - don't delete files modified since "+yellow("<time>"), file=out)
		print( "   "+yellow("<time>"), "is a duration: \"1y\" is"+
				" \"one year\", \"2w\" is \"two weeks\", etc. ", file=out)
		print( "   "+"Units are: y (years), m (months), w (weeks), "+
				"d (days) and h (hours).", file=out)
		print( yellow(" -h, --help")+ \
			"                - display the help screen", file=out)
		print( yellow(" -V, --version")+
			"             - display version info", file=out)
		print( file=out)
	if _error == 'actions' or help == 'all':
		print( "Available", green("actions")+":", file=out)
		print( green(" packages")+
			"     - clean outdated binary packages from PKGDIR", file=out)
		print( green(" distfiles")+
			"    - clean outdated packages sources files from DISTDIR", file=out)
		print( file=out)
	if _error in ('packages-options','merged-packages-options') \
	or help in ('all','packages'):
		print( "Available", yellow("options"),"for the",
				green("packages"),"action:", file=out)
		print( yellow(" -i, --ignore-failure")+
			"             - ignore failure to locate PKGDIR", file=out)
		print( file=out)
	if _error in ('distfiles-options', 'merged-distfiles-options') \
	or help in ('all','distfiles'):
		print("Available", yellow("options"),"for the",
				green("distfiles"),"action:", file=out)
		print( yellow(" -f, --fetch-restricted")+
			"   - protect fetch-restricted files (when --deep)", file=out)
		print( yellow(" -s, --size-limit=<size>")+
			"  - don't delete distfiles bigger than "+yellow("<size>"), file=out)
		print( "   "+yellow("<size>"), "is a size specification: "+
				"\"10M\" is \"ten megabytes\", \"200K\" is", file=out)
		print( "   "+"\"two hundreds kilobytes\", etc.  Units are: "+
				"G, M, K and B.", file=out)
		print( file=out)
	print( "More detailed instruction can be found in",
			turquoise("`man %s`" % __productname__), file=out)
Exemple #51
0
def doAction(action,options,exclude={}, output=None):
	"""doAction: execute one action, ie display a few message, call the right
	find* function, and then call doCleanup with its result."""
	# define vocabulary for the output
	if action == 'packages':
		files_type = "binary packages"
	else:
		files_type = "distfiles"
	saved = {}
	deprecated = {}
	# find files to delete, depending on the action
	if not options['quiet']:
		output.einfo("Building file list for "+action+" cleaning...")
	if action == 'packages':
		clean_me = findPackages(
			options,
			exclude=exclude,
			destructive=options['destructive'],
			package_names=options['package-names'],
			time_limit=options['time-limit'],
			pkgdir=pkgdir,
			#port_dbapi=Dbapi(portage.db[portage.root]["porttree"].dbapi),
			#var_dbapi=Dbapi(portage.db[portage.root]["vartree"].dbapi),
		)
	else:
		# accept defaults
		engine = DistfilesSearch(output=options['verbose-output'],
			#portdb=Dbapi(portage.db[portage.root]["porttree"].dbapi),
			#var_dbapi=Dbapi(portage.db[portage.root]["vartree"].dbapi),
		)
		clean_me, saved, deprecated = engine.findDistfiles(
			exclude=exclude,
			destructive=options['destructive'],
			fetch_restricted=options['fetch-restricted'],
			package_names=options['package-names'],
			time_limit=options['time-limit'],
			size_limit=options['size-limit'],
			deprecate = options['deprecated']
		)

	# initialize our cleaner
	cleaner = CleanUp(output.progress_controller)

	# vocabulary for the final message (needed below even when nothing is cleaned)
	if options['pretend']:
		verb = "would be"
	else:
		verb = "were"

	# actually clean files if something was found
	if clean_me:
		# verbose pretend message
		if options['pretend'] and not options['quiet']:
			output.einfo("Here are the "+files_type+" that would be deleted:")
		# verbose non-pretend message
		elif not options['quiet']:
			output.einfo("Cleaning " + files_type  +"...")
		# do the cleanup, and get size of deleted files
		if options['pretend']:
			clean_size = cleaner.pretend_clean(clean_me)
		elif action in ['distfiles']:
			clean_size = cleaner.clean_dist(clean_me)
		elif action in ['packages']:
			clean_size = cleaner.clean_pkgs(clean_me,
				pkgdir)
		# display freed space
		if not options['quiet']:
			output.total('normal', clean_size, len(clean_me), verb, action)
	# nothing was found
	elif not options['quiet']:
		output.einfo("Your "+action+" directory was already clean.")
	if saved and not options['quiet']:
		print()
		print( (pp.emph("   The following ") + yellow("unavailable") +
			pp.emph(" files were saved from cleaning due to exclusion file entries")))
		output.set_colors('deprecated')
		clean_size = cleaner.pretend_clean(saved)
		output.total('deprecated', clean_size, len(saved), verb, action)
	if deprecated and not options['quiet']:
		print()
		print( (pp.emph("   The following ") + yellow("unavailable") +
			pp.emph(" installed packages were found")))
		output.set_colors('deprecated')
		output.list_pkgs(deprecated)
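A hedged sketch of the option keys doAction() reads above; the names come from the snippet, the values are only illustrative, and the call itself stays commented out because it needs a real progress/output controller and pkgdir.

# Sketch only: minimal options mapping for doAction().
options = {
    'quiet': False,
    'pretend': True,
    'destructive': False,
    'package-names': False,
    'fetch-restricted': False,
    'time-limit': 0,
    'size-limit': 0,
    'deprecated': False,
    'verbose-output': None,
}
# doAction('distfiles', options, exclude={}, output=my_output_controller)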
Exemple #53
0
def warn(msg, fatal=False, stdout=CONFIG['stdout']):
    print(yellow(" * Warning:"), msg, file=stdout)
    print('', file=stdout)
    if fatal:
        sys.exit(1)
Exemple #54
0
def main(settings=None, logger=None):
	"""Main program operation method....

	@param settings: dict.  defaults to settings.DEFAULTS
	@param logger: python logging module defaults to init_logger(settings)
	@return boolean  success/failure
	"""
	if settings is None:
		print("NO Input settings, using defaults...")
		settings = DEFAULTS.copy()

	if logger is None:
		logger = init_logger(settings)

	_libs_to_check = settings['library']

	if not settings['stdout'].isatty() or settings['nocolor']:
		nocolor()

	logger.warning(blue(' * ') +
		yellow('This is the new python coded version'))
	logger.warning(blue(' * ') +
		yellow('Please report any bugs found using it.'))
	logger.warning(blue(' * ') +
		yellow('The original revdep-rebuild script is '
			'installed as revdep-rebuild.sh'))
	logger.warning(blue(' * ') +
		yellow('Please file bugs at: '
			'https://bugs.gentoo.org/'))

	if os.getuid() != 0 and not settings['PRETEND']:
		logger.warning(blue(' * ') +
			yellow('You are not root, adding --pretend to portage options'))
		settings['PRETEND'] = True

	logger.debug("\tmain(), _libs_to_check = %s" % str(_libs_to_check))

	if settings['USE_TMP_FILES'] \
			and check_temp_files(settings['DEFAULT_TMP_DIR'], logger=logger):
		libraries, la_libraries, libraries_links, binaries = read_cache(
			settings['DEFAULT_TMP_DIR'])
		assigned, orphaned = analyse(
			settings=settings,
			logger=logger,
			libraries=libraries,
			la_libraries=la_libraries,
			libraries_links=libraries_links,
			binaries=binaries,
			_libs_to_check=_libs_to_check)
	else:
		assigned, orphaned = analyse(settings, logger, _libs_to_check=_libs_to_check)

	if not assigned and not orphaned:
		logger.warning('\n' + bold('Your system is consistent'))
		# return the correct exit code
		return 0
	elif orphaned:
		# blank line for better visibility of the following lines
		logger.warning('')
		if settings['library']:
			logger.warning(red(' !!! Dependent orphaned files: ') +
				bold('No installed package was found for the following:'))
		else:
			logger.warning(red(' !!! Broken orphaned files: ') +
				bold('No installed package was found for the following:'))
		for filename in orphaned:
			logger.warning(red('\t* ') + filename)

	success = rebuild(logger, assigned, settings)
	logger.debug("rebuild return code = %i" %success)
	return success
Exemple #55
0
def collect_libraries_from_dir(dirs, mask, logger):
	''' Collects all libraries from the specified list of directories.
		mask is a list of paths omitted from the scan; each entry can be either
		a single file or an entire directory.
		Returns a tuple composed of: list of libraries, list of .la files,
		list of symlinks, and a list of (symlink_id, library_id) pairs used for
		resolving dependencies.
	'''

	# contains the list of directories found;
	# allows us to reduce the number of function calls
	found_directories = []
	found_files = []
	found_symlinks = []
	found_la_files = [] # la libraries
	symlink_pairs = []  # list of pairs symlink_id->library_id

	for _dir in dirs:
		if _dir in mask:
			continue

		try:
			for listing in os.listdir(_dir):
				listing = os.path.join(_dir, listing)
				if listing in mask:
					continue

				if os.path.isdir(listing):
					if os.path.islink(listing):
						# we do not want to scan symlinked directories
						pass
					else:
						found_directories.append(listing)
				elif os.path.isfile(listing):
					if (listing.endswith('.so') or 
						listing.endswith('.a') or 
						'.so.' in listing
						):
						if listing in found_files or listing in found_symlinks:
							continue

						if os.path.islink(listing):
							found_symlinks.append(listing)
							abs_path = os.path.realpath(listing)
							if abs_path in found_files:
								index = found_files.index(abs_path)
							else:
								found_files.append(abs_path)
								index = len(found_files)-1
							symlink_pairs.append((len(found_symlinks)-1, index,))
						else:
							found_files.append(listing)
						continue
					elif listing.endswith('.la'):
						if listing in found_la_files:
							continue

						found_la_files.append(listing)
					else:
						# sometimes there are binaries in libs' subdir,
						# for example in nagios
						if not os.path.islink(listing):
							if listing in found_files or listing in found_symlinks:
								continue
							prv = os.stat(listing)[stat.ST_MODE]
							if prv & stat.S_IXUSR == stat.S_IXUSR or \
									prv & stat.S_IXGRP == stat.S_IXGRP or \
									prv & stat.S_IXOTH == stat.S_IXOTH:
								found_files.append(listing)
		except Exception as ex:
			logger.debug(
				yellow('Exception during collecting libraries: ' + 
				blue('%s')  %str(ex)))


	if found_directories:
		_file, la_file, link, pair = \
			collect_libraries_from_dir(found_directories, mask, logger)
		found_files += _file
		found_la_files += la_file
		found_symlinks += link
		symlink_pairs += pair

	return (found_files, found_la_files, found_symlinks, symlink_pairs)
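A hedged usage sketch for the recursive library scan above; the paths are illustrative, and the os/stat imports are assumed to come from the snippet's module.

# Sketch only: scan one directory tree, skipping a masked subdirectory.
import logging

logger = logging.getLogger('revdep-rebuild.sketch')
libs, la_files, symlinks, pairs = collect_libraries_from_dir(
    ['/usr/lib64'], ['/usr/lib64/debug'], logger)
# libs: regular .so/.a files, la_files: libtool archives, symlinks: library links,
# pairs: (symlink index, library index) tuples for resolving the links later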
Exemple #56
0
def updatedb(config=None):

    if not os.access(config['esearchdbdir'], os.W_OK):
        print(yellow("Warning:"),
            "You do not have sufficient permissions to save the index file in:",
            green(config['esearchdbdir']), file=config['stderr'])
        return False

    if config['verbose'] != -1 and "ACCEPT_KEYWORDS" in environ:
        print(yellow("Warning:"),
            "You have set ACCEPT_KEYWORDS in environment, this will result",
            file=config['stdout'])
        print("         in a modified index file", file=config['stdout'])

    ebuilds = portage.portdb.cp_all()
    numebuilds = len(ebuilds)

    if exists(config['tmpfile']):
        error("there is probably another eupdatedb running already.\n" +
            "         If you're sure there is no other process, remove",
            config['tmpfile'], fatal=False)
        return False
    try:
        dbfd = os.open(config['tmpfile'], O_CREAT | O_EXCL | O_WRONLY, 0o600)
    except OSError:
        error("Failed to open temporary file.", fatal=False)
        return False
    dbfile = fdopen(dbfd, "w")
    dbfile.write("dbversion = " + str(config['needdbversion']) + "\n")
    dbfile.write("db = (\n")

    if not config['verbose']:
        config['stdout'].write(green(" * ") + "indexing: ")
        config['stdout'].flush()
        nr = 0
        nrchars = 0
    elif config['verbose'] == 1:
        lastcat = False

    cattime = time()
    try:
        for pkg in ebuilds:
            masked = False

            if not config['verbose']:
                nr += 1
                s = str(numebuilds - nr) + " ebuilds to go"
                config['stdout'].write((nrchars * "\b \b") + s)
                config['stdout'].flush()
                nrchars = len(s)

            pkgv = portage.portdb.xmatch("bestmatch-visible", pkg)
            if not pkgv:
                pkgv = portage.best(portage.portdb.xmatch("match-all", pkg))
                if not pkgv:
                    continue
                masked = True

            if len(pkgv) > 1:
                try:
                    homepage, description, _license = portage.portdb.aux_get(
                        pkgv, ["HOMEPAGE", "DESCRIPTION", "LICENSE"])
                except KeyError:
                    homepage, description, _license = "", "", ""
                    pass

            if len(pkgv) > 1:
                filesize = getfetchsize(pkgv)
            else:
                filesize = '0'

            (curcat, pkgname) = pkg.split("/")

            if config['verbose'] == 1 and curcat != lastcat:
                if lastcat != False:
                    print(duration(cattime), file=config['stdout'])
                print(bold(" * " + curcat) + ":", end=' ', file=config['stdout'])
                cattime = time()
                lastcat = curcat

            installed = pkg_version(VARTREE.dep_bestmatch(pkg))
            if installed:
                installed = str(installed)

            dbfile.write(repr((str(pkgname),
                            str(pkg),
                            masked,
                            str(pkg_version(pkgv)),
                            installed,
                            str(filesize),
                            str(homepage),
                            str(description),
                            str(_license))) + str(",\n"))
    except KeyboardInterrupt:
        dbfile.close()
        unlink(config['tmpfile'])
        print("", file=config['stdout'])
        return False

    print("", file=config['stdout'])

    dbfile.write(")")
    dbfile.close()

    copyfile(config['tmpfile'],
        os.path.join(config['esearchdbdir'], config['esearchdbfile']))
    unlink(config['tmpfile'])

    sys.path.insert(0, config['esearchdbdir'])
    import esearchdb # import the file, to generate pyc

    if exists(
        os.path.join(config['esearchdbdir'], config['esearchdbfile']) + "c"):
        config['esearchdbfile'] += "c"

    print(green(" *"), "esearch-index generated in", duration(start),
        file=config['stdout'])
    print(green(" *"), "indexed", bold(str(numebuilds)), "ebuilds",
        file=config['stdout'])
    print(green(" *"), "size of esearch-index:",
        bold(str(int(stat(
            os.path.join(config['esearchdbdir'], config['esearchdbfile'])
            )[6]/1024)) + " kB"), file=config['stdout'])
    return True
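A hedged sketch of the config keys updatedb() reads above; the paths and version number are illustrative, and the call stays commented out because the function also relies on module-level portage state (VARTREE, getfetchsize, duration, etc.).

# Sketch only: minimal config mapping for updatedb().
import sys

config = {
    'esearchdbdir': '/var/cache/edb',     # where the generated index is written
    'esearchdbfile': 'esearchdb.py',
    'tmpfile': '/tmp/esearchdb.py.tmp',
    'needdbversion': 1,                   # illustrative version stamp
    'verbose': 0,
    'stdout': sys.stdout,
    'stderr': sys.stderr,
}
# ok = updatedb(config)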