Example #1
    def get_coin_features(self):
        "set the global variable SWIG_COND_SYMBOLS needed for conditional " + \
        "wrapping"
        if sys.platform == "win32": return
        print blue("Checking for Coin features..."),
        if not os.system("coin-config --have-feature 3ds_import"):
            self.SWIG_COND_SYMBOLS.append("-DHAVE_FEATURE_3DS_IMPORT")
            print green("3ds import "),

        if not os.system("coin-config --have-feature vrml97"):
            self.SWIG_COND_SYMBOLS.append("-DHAVE_FEATURE_VRML97")
            print green("vrml97 "),

        if not os.system("coin-config --have-feature sound"):
            self.SWIG_COND_SYMBOLS.append("-DHAVE_FEATURE_SOUND")
            print green("sound "),

        if not os.system("coin-config --have-feature superglu"):
            self.SWIG_COND_SYMBOLS.append("-DHAVE_FEATURE_SUPERGLUE")
            print green("superglu "),

        if not os.system("coin-config --have-feature threads"):
            self.SWIG_COND_SYMBOLS.append("-DHAVE_FEATURE_THREADS")
            print green("threads "),

        if not os.system("coin-config --have-feature threadsafe"):
            self.SWIG_COND_SYMBOLS.append("-DHAVE_FEATURE_THREADSAFE")
            print green("threadsafe "),

        print
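The six feature checks differ only in the feature name, so they collapse naturally into a table-driven loop. A sketch of that refactor, assuming the same coin-config CLI and running inside the same class (the original maps the "superglu" feature to the symbol HAVE_FEATURE_SUPERGLUE, hence an explicit table rather than a plain feature.upper()):

        features = {"3ds_import": "3DS_IMPORT", "vrml97": "VRML97",
                    "sound": "SOUND", "superglu": "SUPERGLUE",
                    "threads": "THREADS", "threadsafe": "THREADSAFE"}
        for feature, symbol in features.items():
            # coin-config exits with status 0 when the feature is present
            if os.system("coin-config --have-feature %s" % feature) == 0:
                self.SWIG_COND_SYMBOLS.append("-DHAVE_FEATURE_%s" % symbol)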
Example #2
 def check_cmd_exists(self, cmd):
     "return the path of the specified command if it exists"
     print blue("Checking for %s..." % cmd),
     for path in os.environ['PATH'].split(os.path.pathsep):
         if os.path.exists(os.path.join(path, cmd)):
             print blue("'%s'" % os.path.join(path, cmd))
             return 1
     print red("not found.")
     return 0
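On Python 3 the same PATH scan is available as a standard-library one-liner; a minimal sketch using shutil.which (my substitution, not the project's code):

import shutil

def check_cmd_exists(cmd):
    "return the full path of cmd if it is on the PATH, else None"
    return shutil.which(cmd)  # walks os.environ["PATH"] like the loop above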
Example #3
 def check_coin_version(self):
     "check the Coin version"
     if sys.platform == "win32": return
     if not self.check_cmd_exists("coin-config"):
         sys.exit(1)
     print blue("Coin version..."),
     version = self.do_os_popen("coin-config --version")
     print blue("%s" % version)
     if not version.startswith('3'):
         print yellow("** Warning: Pivy has only been tested with Coin "
                      "versions Coin-dev 3.")
Example #4
    def swig_generate(self):
        "build all available modules"

        quote = lambda s : '"' + s + '"'
        for module in self.MODULES:
            module_name = self.MODULES[module][0]
            config_cmd = self.MODULES[module][1]
            module_pkg_name = self.MODULES[module][2]
            mod_out_prefix = module_pkg_name.replace('.', os.sep) + module
            
            if sys.platform == "win32":
                INCLUDE_DIR = os.path.join(os.getenv("COINDIR"), "include")
                CPP_FLAGS = "-I" + quote(INCLUDE_DIR) + " " + \
                            "-I" + quote(os.path.join(os.getenv("COINDIR"), "include", "Inventor", "annex")) + \
                            " /DCOIN_DLL /wd4244 /wd4049"
                # acquire highest non-debug Coin library version
                LDFLAGS_LIBS = quote(max(glob.glob(os.path.join(os.getenv("COINDIR"), "lib", "coin?.lib")))) + " "
                if module == "sowin":
                    CPP_FLAGS += " /DSOWIN_DLL"
                    LDFLAGS_LIBS += quote(os.path.join(os.getenv("COINDIR"), "lib", "sowin1.lib"))
                elif module == "soqt":
                    CPP_FLAGS += " -I" + '"' + os.getenv("QTDIR") + "\\include\"  /DSOQT_DLL"
                    CPP_FLAGS += " -I" + '"' + os.getenv("QTDIR") + "\\include\Qt\""
                    LDFLAGS_LIBS += os.path.join(os.getenv("COINDIR"), "lib", "soqt1.lib") + " "
            else:
                INCLUDE_DIR = self.do_os_popen("coin-config --includedir")
                if module_name != 'coin':
                    mod_include_dir = self.do_os_popen("%s --includedir" % config_cmd)
                    if mod_include_dir != INCLUDE_DIR:
                        INCLUDE_DIR += '\" -I\"%s' % mod_include_dir
                CPP_FLAGS = self.do_os_popen("%s --cppflags" % config_cmd)
                LDFLAGS_LIBS = self.do_os_popen("%s --ldflags --libs" % config_cmd)

            if not os.path.isfile(mod_out_prefix + "_wrap.cpp"):
                print red("\n=== Generating %s_wrap.cpp for %s ===\n" %
                          (mod_out_prefix, module))
                print blue(self.SWIG + " " + self.SWIG_SUPPRESS_WARNINGS + " " + self.SWIG_PARAMS %
                           (INCLUDE_DIR,
                            self.CXX_INCS,
                            mod_out_prefix, module))
                if os.system(self.SWIG + " " + self.SWIG_SUPPRESS_WARNINGS + " " + self.SWIG_PARAMS %
                             (INCLUDE_DIR,
                              self.CXX_INCS,
                              mod_out_prefix, module)):
                    print red("SWIG did not generate wrappers successfully! ** Aborting **")
                    sys.exit(1)
            else:
                print red("=== %s_wrap.cpp for %s already exists! ===" % (mod_out_prefix, module_pkg_name + module))

            self.ext_modules.append(Extension(module_name, [mod_out_prefix + "_wrap.cpp"],
                                              extra_compile_args=(self.CXX_INCS + CPP_FLAGS).split(),
                                              extra_link_args=(self.CXX_LIBS + LDFLAGS_LIBS).split()))
            self.py_modules.append(module_pkg_name + module)
Example #5
    def check_simvoleon_version(self):
        "return if SIMVoleon is available and check the version"
        if sys.platform == "win32" or not self.check_cmd_exists("simvoleon-config"):
            self.MODULES.pop('simvoleon', None)
            return False

        print blue("SIMVoleon version..."),
        version = self.do_os_popen("simvoleon-config --version")
        print blue("%s" % version)
        if not version.startswith('2.0'):
            print yellow("** Warning: Pivy has only been tested with SIMVoleon "
                         "versions 2.0.x.")
        return True
Example #6
 def check_swig_version(self, swig):
     "check for the swig version"
     global SWIG_VERSION
     if not self.check_cmd_exists(swig):
         sys.exit(1)
     print blue("Checking for SWIG version..."),
     p = subprocess.Popen("%s -version" % swig, 
                          shell=True, stdout=subprocess.PIPE)
     version = p.stdout.readlines()[1].strip().split(" ")[2]
     p.stdout.close()
     print blue("%s" % version)
     SWIG_VERSION = version
     if not version in self.SUPPORTED_SWIG_VERSIONS:
         print yellow("Warning: Pivy has only been tested with the following " + \
                      "SWIG versions: %s." % " ".join(self.SUPPORTED_SWIG_VERSIONS))
Example #7
	def convert_myoldbest(self, pkg, pkg_info):
		"""converts and colorizes a version list to a string

		@param pkg: _emerge.Package.Package instance
		@param pkg_info: dictionary
		@rtype string.
		"""
		myoldbest = pkg_info.oldbest_list
		# Convert myoldbest from a list to a string.
		myoldbest_str = ""
		if myoldbest:
			versions = []
			for pos, old_pkg in enumerate(myoldbest):
				key = old_pkg.version
				if key[-3:] == "-r0":
					key = key[:-3]
				if self.conf.verbosity == 3:
					if pkg_info.attr_display.new_slot:
						key += _slot_separator + old_pkg.slot
						if old_pkg.slot != old_pkg.sub_slot:
							key += "/" + old_pkg.sub_slot
					elif any(x.slot + "/" + x.sub_slot != "0/0" for x in myoldbest + [pkg]):
						key += _slot_separator + old_pkg.slot
						if old_pkg.slot != old_pkg.sub_slot or \
							old_pkg.slot == pkg.slot and old_pkg.sub_slot != pkg.sub_slot:
							key += "/" + old_pkg.sub_slot
					if not self.quiet_repo_display and (self.verbose_main_repo_display or
						self.portdb.repositories.mainRepo() is None or
						any(x.repo != self.portdb.repositories.mainRepo().name for x in myoldbest + [pkg])):
						key += _repo_separator + old_pkg.repo
				versions.append(key)
			myoldbest_str = blue("["+", ".join(versions)+"]")
		return myoldbest_str
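For orientation, at verbosity 3 each entry accumulates version, slot, sub-slot and repository. Assuming the usual portage separators (_slot_separator == ":" and _repo_separator == "::"), a single entry renders along these lines:

	# "1.4.2" + ":" + "2" + "/" + "2.1" + "::" + "gentoo"
	# -> "1.4.2:2/2.1::gentoo", then joined with ", " and wrapped in blue("[...]")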
Example #8
	def _insert_slot(self, pkg, pkg_info, myinslotlist):
		"""Adds slot info to the message

		@returns addl: formatted slot info
		Modifies self.counters.downgrades, self.counters.upgrades,
			self.counters.binary
		"""
		addl = "   " + pkg_info.fetch_symbol
		if not cpvequal(pkg.cpv,
			best([pkg.cpv] + [x.cpv for x in myinslotlist])):
			# Downgrade in slot
			addl += turquoise("U")+blue("D")
			if pkg_info.ordered:
				self.counters.downgrades += 1
				if pkg.type_name == "binary":
					self.counters.binary += 1
		else:
			# Update in slot
			addl += turquoise("U") + " "
			if pkg_info.ordered:
				self.counters.upgrades += 1
				if pkg.type_name == "binary":
					self.counters.binary += 1
		return addl
Example #9
	def _set_no_columns(self, pkg, pkg_info):
		"""prints pkg info without column indentation.

		@param pkg: _emerge.Package.Package instance
		@param pkg_info: dictionary
		@rtype the updated addl
		"""
		pkg_str = pkg.cpv
		if self.conf.verbosity == 3:
			pkg_str = self._append_slot(pkg_str, pkg, pkg_info)
			pkg_str = self._append_repository(pkg_str, pkg, pkg_info)
		if not pkg_info.merge:
			addl = self.empty_space_in_brackets()
			myprint = "[%s%s] %s%s %s" % \
				(self.pkgprint(pkg_info.operation.ljust(13),
				pkg_info), addl,
				self.indent, self.pkgprint(pkg_str, pkg_info),
				pkg_info.oldbest)
		else:
			myprint = "[%s %s%s] %s%s %s" % \
				(self.pkgprint(pkg.type_name, pkg_info),
				pkg_info.attr_display, \
				bold(blue("L")) if self._has_local_patch(pkg) else " ", \
				self.indent, \
				self.pkgprint(pkg_str, pkg_info), pkg_info.oldbest)
				#if self.localpatch_enabled:
				#self. += bold(blue("L")) if self._has_local_patch(pkg) else " "
		return myprint
Example #10
    def copy_and_swigify_headers(self, includedir, dirname, files):
        """Copy the header files to the local include directories. Add an
        #include line at the beginning for the SWIG interface files..."""

        for file in files:
            if not os.path.isfile(os.path.join(dirname, file)):
                continue

            if file[-2:] == ".i":
                file_i = os.path.join(dirname, file)
                file_h = os.path.join(dirname, file)[:-2] + ".h"

                if (not os.path.exists(file_h) and
                    os.path.exists(os.path.join(includedir, file_h))):
                    shutil.copyfile(os.path.join(includedir, file_h), file_h)
                    sys.stdout.write(' ' + turquoise(file_h))
                    fd = open(file_h, 'r+')
                    contents = fd.readlines()

                    ins_line_nr = -1
                    for line in contents:
                        ins_line_nr += 1
                        if line.find("#include ") != -1:
                            break

                    if ins_line_nr != -1:
                        contents.insert(ins_line_nr, self.pivy_header_include % (file_i))
                        fd.seek(0)
                        fd.writelines(contents)
                    else:
                        print blue("[") + red("failed") + blue("]")
                        sys.exit(1)
                    fd.close()
            # fixes for SWIG 1.3.21 and upwards
            # (mostly working around SWIG's "function like macros"
            # preprocessor bug when no parameters are provided, which then
            # results in no constructors being created in the wrapper)
            elif file[-4:] == ".fix":
                sys.stdout.write(' ' + red(os.path.join(dirname, file)[:-4]))
                shutil.copyfile(os.path.join(dirname, file),
                                os.path.join(dirname, file)[:-4])
            # had to introduce this because windows is a piece of crap
            elif sys.platform == "win32" and file[-6:] == ".win32":
                sys.stdout.write(' ' + red(os.path.join(dirname, file)[:-6]))
                shutil.copyfile(os.path.join(dirname, file),
                                os.path.join(dirname, file)[:-6])
Example #11
def keyword(string, stable=True, hard_masked=False):
	"""Returns a keyword string."""
	if stable:
		return output.green(string)
	if hard_masked:
		return output.red(string)
	# keyword masked:
	return output.blue(string)
Example #12
	def __str__(self):
		s = self.name
		if self.enabled:
			s = red(s)
		else:
			s = '-' + s
			s = blue(s)
		if self.forced:
			s = '(%s)' % s
		return s
Example #13
    def pivy_configure(self):
        "configure Pivy"
        print turquoise(self.PIVY_SNAKES)
        print blue("Platform...%s" % sys.platform)
        self.check_python_version()
        self.check_swig_version(self.SWIG)
        self.check_coin_version()
        self.get_coin_features()
        if self.SOGUI: self.check_gui_bindings()
        
        if 'simvoleon' in self.MODULES and self.check_simvoleon_version():
            if sys.platform == "win32":
                INCLUDE_DIR = os.getenv("SIMVOLEONDIR") + "\\include"
            else:
                INCLUDE_DIR = self.do_os_popen("simvoleon-config --includedir")

            sys.stdout.write(blue("Preparing") + green(" VolumeViz ") + blue("headers:"))
            os.path.walk("VolumeViz", self.copy_and_swigify_headers,
                         INCLUDE_DIR)
            print green(".")

        if sys.platform == "win32":
            INCLUDE_DIR = os.path.join(os.getenv("COINDIR"), "include")
        else:
            INCLUDE_DIR = self.do_os_popen("coin-config --includedir")

        sys.stdout.write(blue("Preparing") + green(" Inventor ") + blue("headers:"))
        os.path.walk("Inventor", self.copy_and_swigify_headers,
                     INCLUDE_DIR)
        print green(".")
Example #14
    def run(self):
        "the entry point for the distutils clean class"
        sys.stdout.write(blue("Cleaning headers:"))
        os.path.walk("Inventor", self.remove_headers, None)
        os.path.walk("VolumeViz", self.remove_headers, None)
        # remove the SWIG generated wrappers
        for wrapper_file in self.REMOVE_FILES:
            if os.path.isfile(wrapper_file):
                sys.stdout.write(' ' + turquoise(wrapper_file))
                os.remove(wrapper_file)
        print green(".")

        clean.run(self)
Example #15
def masking(mask):
	"""Returns a 'masked by' string."""
	if 'package.mask' in mask or 'profile' in mask:
		# use porthole wrap style to help clarify meaning
		return output.red("M["+mask[0]+"]")
	if mask:
		for status in mask:
			if 'keyword' in status:
				# keyword masked | " [missing keyword] " <=looks better
				return output.blue("["+status+"]")
			if status in archlist:
				return output.green(status)
			if 'unknown' in status:
				return output.yellow(status)
		return output.red(status)
	return ''
Example #16
def collect_binaries_from_dir(dirs, mask, logger):
	''' Collects all binaries from the specified list of directories.
		mask is a list of paths omitted from scanning; each entry can
		be either a single file or an entire directory.
		Returns a list of binaries.
	'''

	# list of directories found so far;
	# collecting them lets us reduce the number of function calls
	found_directories = []
	found_files = []

	for _dir in dirs:
		if _dir in mask:
			continue

		try:
			for listing in os.listdir(_dir):
				listing = os.path.join(_dir, listing)
				if listing in mask:
					continue

				if os.path.isdir(listing):
					if os.path.islink(listing):
						# we do not want to scan symlink directories
						pass
					else:
						found_directories.append(listing)
				elif os.path.isfile(listing):
					# we're looking for binaries
					# and with binaries we do not need links
					# thus we can optimize a bit
					if not os.path.islink(listing):
						prv = os.stat(listing)[stat.ST_MODE]
						if prv & stat.S_IXUSR == stat.S_IXUSR or \
								prv & stat.S_IXGRP == stat.S_IXGRP or \
								prv & stat.S_IXOTH == stat.S_IXOTH:
							found_files.append(listing)
		except Exception as ex:
			logger.debug(
				yellow('Exception while collecting binaries: ' +
				blue('%s') % str(ex)))

	if found_directories:
		found_files += collect_binaries_from_dir(found_directories, mask, logger)

	return found_files
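The stat-mask test above accepts a file if any of the three execute bits is set. os.access offers a shorter spelling, with one caveat: it asks whether the current process may execute the file, which only matches the any-bit test when running as root. A sketch under that assumption:

					# simpler, but checks this process's own permissions
					if not os.path.islink(listing) and os.access(listing, os.X_OK):
						found_files.append(listing)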
Example #17
    def __str__(self):
        output = []

        if self.interactive:
            output.append(colorize("WARN", "I"))
        else:
            output.append(" ")

        if self.new or self.force_reinstall:
            if self.force_reinstall:
                output.append(red("r"))
            else:
                output.append(green("N"))
        else:
            output.append(" ")

        if self.new_slot or self.replace:
            if self.replace:
                output.append(yellow("R"))
            else:
                output.append(green("S"))
        else:
            output.append(" ")

        if self.fetch_restrict or self.fetch_restrict_satisfied:
            if self.fetch_restrict_satisfied:
                output.append(green("f"))
            else:
                output.append(red("F"))
        else:
            output.append(" ")

        if self.new_version:
            output.append(turquoise("U"))
        else:
            output.append(" ")

        if self.downgrade:
            output.append(blue("D"))
        else:
            output.append(" ")

        if self.mask is not None:
            output.append(self.mask)

        return "".join(output)
Example #18
	def convert_myoldbest(myoldbest):
		"""converts and colorizes a version list to a string

		@param myoldbest: list
		@rtype string.
		"""
		# Convert myoldbest from a list to a string.
		myoldbest_str = ""
		if myoldbest:
			versions = []
			for pos, pkg in enumerate(myoldbest):
				key = catpkgsplit(pkg.cpv)[2] + \
					"-" + catpkgsplit(pkg.cpv)[3]
				if key[-3:] == "-r0":
					key = key[:-3]
				versions.append(key)
			myoldbest_str = blue("["+", ".join(versions)+"]")
		return myoldbest_str
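The catpkgsplit helper used here splits a cpv string into category, package, version and revision, filling in "r0" when no revision is present, which is why the "-r0" suffix is stripped afterwards. An illustrative call (assumes portage is importable; the package atom is made up):

	from portage.versions import catpkgsplit

	cat, name, ver, rev = catpkgsplit("app-misc/foo-1.2.3")
	# ('app-misc', 'foo', '1.2.3', 'r0') -> key "1.2.3-r0" -> "1.2.3"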
Example #19
def do_normal(pkg, verbose):
    data = []
    if not pkg[4]:
        installed = "[ Not Installed ]"
    else:
        installed = pkg[4]

    if pkg[2]:
        masked = red(" [ Masked ]")
    else:
        masked = ""

    data.append("%s  %s%s\n      %s %s\n      %s %s" % \
            (green("*"), bold(pkg[1]), masked,
            darkgreen("Latest version available:"), pkg[3],
            darkgreen("Latest version installed:"), installed))

    if verbose:
        mpv = best(portdb.xmatch("match-all", pkg[1]))
        iuse_split, final_use = get_flags(mpv, final_setting=True)
        iuse = ""
        use_list = []
        for ebuild_iuse in iuse_split:
            use = ebuild_iuse.lstrip('+-')
            if use in final_use:
                use_list.append(red("+" + use) + " ")
            else:
                use_list.append(blue("-" + use) + " ")
        use_list.sort()
        iuse = ' '.join(use_list)
        if iuse == "":
            iuse = "-"

        data.append("      %s         %s\n      %s       %s" % \
                (darkgreen("Unstable version:"), pkg_version(mpv),
                 darkgreen("Use Flags (stable):"), iuse))

    data.append("      %s %s\n      %s    %s\n      %s %s\n      %s     %s\n" % \
            (darkgreen("Size of downloaded files:"), pkg[5],
             darkgreen("Homepage:"), pkg[6],
             darkgreen("Description:"), pkg[7],
             darkgreen("License:"), pkg[8]))
    return data
Example #20
	def convert_myoldbest(self, pkg, myoldbest):
		"""converts and colorizes a version list to a string

		@param pkg: _emerge.Package.Package instance
		@param myoldbest: list
		@rtype string.
		"""
		# Convert myoldbest from a list to a string.
		myoldbest_str = ""
		if myoldbest:
			versions = []
			for pos, old_pkg in enumerate(myoldbest):
				key = catpkgsplit(old_pkg.cpv)[2] + "-" + catpkgsplit(old_pkg.cpv)[3]
				if key[-3:] == "-r0":
					key = key[:-3]
				if self.conf.verbosity == 3 and not self.quiet_repo_display and (self.verbose_main_repo_display or
					any(x.repo != self.portdb.repositories.mainRepo().name for x in myoldbest + [pkg])):
					key += _repo_separator + old_pkg.repo
				versions.append(key)
			myoldbest_str = blue("["+", ".join(versions)+"]")
		return myoldbest_str
Example #21
 def check_gui_bindings(self):
     "check for availability of SoGui bindings and removes the not available ones"
     if sys.platform == "win32":
         print "Coin and SoWin are built by default on Windows..."
         self.MODULES.pop('soxt', None)
         self.MODULES.pop('sogtk', None)
         print blue("Checking for QTDIR environment variable..."),
         if os.getenv("QTDIR"):
             print blue(os.getenv("QTDIR"))
         else:
             self.MODULES.pop('soqt', None)
             print red("not set. (SoQt bindings won't be built)")
     else:
         for gui in self.SOGUI:
             if not self.MODULES.has_key(gui):
                 continue
             gui_config_cmd = self.MODULES[gui][1]
             if not self.check_cmd_exists(gui_config_cmd):
                 self.MODULES.pop(gui, None)
             else:
                 print blue("Checking for %s version..." % gui),
                 version = self.do_os_popen("%s --version" % gui_config_cmd)
                 print blue("%s" % version)
Example #22
    def update(self):
        '''Internal update function which performs the transfer'''
        opts = self.options.get('emerge_config').opts
        self.usersync_uid = self.options.get('usersync_uid', None)
        enter_invalid = '--ask-enter-invalid' in opts
        out = portage.output.EOutput()
        syncuri = self.repo.sync_uri
        if self.repo.module_specific_options.get('sync-rsync-vcs-ignore',
                                                 'false').lower() == 'true':
            vcs_dirs = ()
        else:
            vcs_dirs = frozenset(VCS_DIRS)
            vcs_dirs = vcs_dirs.intersection(os.listdir(self.repo.location))

        for vcs_dir in vcs_dirs:
            writemsg_level(("!!! %s appears to be under revision " + \
             "control (contains %s).\n!!! Aborting rsync sync "
             "(override with \"sync-rsync-vcs-ignore = true\" in repos.conf).\n") % \
             (self.repo.location, vcs_dir), level=logging.ERROR, noiselevel=-1)
            return (1, False)
        self.timeout = 180

        rsync_opts = []
        if self.settings["PORTAGE_RSYNC_OPTS"] == "":
            rsync_opts = self._set_rsync_defaults()
        else:
            rsync_opts = self._validate_rsync_opts(rsync_opts, syncuri)
        self.rsync_opts = self._rsync_opts_extend(opts, rsync_opts)

        self.extra_rsync_opts = list()
        if self.repo.module_specific_options.get('sync-rsync-extra-opts'):
            self.extra_rsync_opts.extend(
                portage.util.shlex_split(
                    self.repo.module_specific_options['sync-rsync-extra-opts'])
            )

        # Process GLEP74 verification options.
        # Default verification to 'no'; it's enabled for ::gentoo
        # via default repos.conf though.
        self.verify_metamanifest = (self.repo.module_specific_options.get(
            'sync-rsync-verify-metamanifest', 'no') in ('yes', 'true'))
        # Support overriding job count.
        self.verify_jobs = self.repo.module_specific_options.get(
            'sync-rsync-verify-jobs', None)
        if self.verify_jobs is not None:
            try:
                self.verify_jobs = int(self.verify_jobs)
                if self.verify_jobs <= 0:
                    raise ValueError(self.verify_jobs)
            except ValueError:
                writemsg_level(
                    "!!! sync-rsync-verify-jobs not a positive integer: %s\n" %
                    (self.verify_jobs, ),
                    level=logging.WARNING,
                    noiselevel=-1)
                self.verify_jobs = None
        # Support overriding max age.
        self.max_age = self.repo.module_specific_options.get(
            'sync-rsync-verify-max-age', '')
        if self.max_age:
            try:
                self.max_age = int(self.max_age)
                if self.max_age < 0:
                    raise ValueError(self.max_age)
            except ValueError:
                writemsg_level(
                    "!!! sync-rsync-max-age must be a non-negative integer: %s\n"
                    % (self.max_age, ),
                    level=logging.WARNING,
                    noiselevel=-1)
                self.max_age = 0
        else:
            self.max_age = 0

        openpgp_env = None
        if self.verify_metamanifest and gemato is not None:
            # Use isolated environment if key is specified,
            # system environment otherwise
            if self.repo.sync_openpgp_key_path is not None:
                openpgp_env = gemato.openpgp.OpenPGPEnvironment()
            else:
                openpgp_env = gemato.openpgp.OpenPGPSystemEnvironment()

        try:
            # Load and update the keyring early. If it fails, then verification
            # will not be performed and the user will have to fix it and try again,
            # so we may as well bail out before actual rsync happens.
            if openpgp_env is not None and self.repo.sync_openpgp_key_path is not None:

                try:
                    out.einfo('Using keys from %s' %
                              (self.repo.sync_openpgp_key_path, ))
                    with io.open(self.repo.sync_openpgp_key_path, 'rb') as f:
                        openpgp_env.import_key(f)
                    out.ebegin('Refreshing keys from keyserver')
                    retry_decorator = self._key_refresh_retry_decorator()
                    if retry_decorator is None:
                        openpgp_env.refresh_keys()
                    else:
                        loop = global_event_loop()
                        func_coroutine = functools.partial(
                            loop.run_in_executor, None,
                            openpgp_env.refresh_keys)
                        decorated_func = retry_decorator(func_coroutine)
                        loop.run_until_complete(decorated_func())
                    out.eend(0)
                except (GematoException, TimeoutError) as e:
                    writemsg_level(
                        "!!! Manifest verification impossible due to keyring problem:\n%s\n"
                        % (e, ),
                        level=logging.ERROR,
                        noiselevel=-1)
                    return (1, False)

            # Real local timestamp file.
            self.servertimestampfile = os.path.join(self.repo.location,
                                                    "metadata",
                                                    "timestamp.chk")

            content = portage.util.grabfile(self.servertimestampfile)
            timestamp = 0
            if content:
                try:
                    timestamp = time.mktime(
                        time.strptime(content[0], TIMESTAMP_FORMAT))
                except (OverflowError, ValueError):
                    pass
            del content

            try:
                self.rsync_initial_timeout = \
                 int(self.settings.get("PORTAGE_RSYNC_INITIAL_TIMEOUT", "15"))
            except ValueError:
                self.rsync_initial_timeout = 15

            try:
                maxretries = int(self.settings["PORTAGE_RSYNC_RETRIES"])
            except SystemExit as e:
                raise  # Needed else can't exit
            except:
                maxretries = -1  #default number of retries

            if syncuri.startswith("file://"):
                self.proto = "file"
                dosyncuri = syncuri[7:]
                unchanged, is_synced, exitcode, updatecache_flg = self._do_rsync(
                    dosyncuri, timestamp, opts)
                self._process_exitcode(exitcode, dosyncuri, out, 1)
                return (exitcode, updatecache_flg)

            retries = 0
            try:
                self.proto, user_name, hostname, port = re.split(
                    r"(rsync|ssh)://([^:/]+@)?(\[[:\da-fA-F]*\]|[^:/]*)(:[0-9]+)?",
                    syncuri,
                    maxsplit=4)[1:5]
            except ValueError:
                writemsg_level("!!! sync-uri is invalid: %s\n" % syncuri,
                               noiselevel=-1,
                               level=logging.ERROR)
                return (1, False)

            self.ssh_opts = self.settings.get("PORTAGE_SSH_OPTS")

            if port is None:
                port = ""
            if user_name is None:
                user_name = ""
            if re.match(r"^\[[:\da-fA-F]*\]$", hostname) is None:
                getaddrinfo_host = hostname
            else:
                # getaddrinfo needs the brackets stripped
                getaddrinfo_host = hostname[1:-1]
            updatecache_flg = False
            all_rsync_opts = set(self.rsync_opts)
            all_rsync_opts.update(self.extra_rsync_opts)

            family = socket.AF_UNSPEC
            if "-4" in all_rsync_opts or "--ipv4" in all_rsync_opts:
                family = socket.AF_INET
            elif socket.has_ipv6 and \
             ("-6" in all_rsync_opts or "--ipv6" in all_rsync_opts):
                family = socket.AF_INET6

            addrinfos = None
            uris = []

            try:
                addrinfos = getaddrinfo_validate(
                    socket.getaddrinfo(getaddrinfo_host, None, family,
                                       socket.SOCK_STREAM))
            except socket.error as e:
                writemsg_level("!!! getaddrinfo failed for '%s': %s\n" %
                               (_unicode_decode(hostname), _unicode(e)),
                               noiselevel=-1,
                               level=logging.ERROR)

            if addrinfos:

                AF_INET = socket.AF_INET
                AF_INET6 = None
                if socket.has_ipv6:
                    AF_INET6 = socket.AF_INET6

                ips_v4 = []
                ips_v6 = []

                for addrinfo in addrinfos:
                    if addrinfo[0] == AF_INET:
                        ips_v4.append("%s" % addrinfo[4][0])
                    elif AF_INET6 is not None and addrinfo[0] == AF_INET6:
                        # IPv6 addresses need to be enclosed in square brackets
                        ips_v6.append("[%s]" % addrinfo[4][0])

                random.shuffle(ips_v4)
                random.shuffle(ips_v6)

                # Give priority to the address family that
                # getaddrinfo() returned first.
                if AF_INET6 is not None and addrinfos and \
                 addrinfos[0][0] == AF_INET6:
                    ips = ips_v6 + ips_v4
                else:
                    ips = ips_v4 + ips_v6

                for ip in ips:
                    uris.append(
                        syncuri.replace(
                            "//" + user_name + hostname + port + "/",
                            "//" + user_name + ip + port + "/", 1))

            if not uris:
                # With some configurations we need to use the plain hostname
                # rather than try to resolve the ip addresses (bug #340817).
                uris.append(syncuri)

            # reverse, for use with pop()
            uris.reverse()
            uris_orig = uris[:]

            effective_maxretries = maxretries
            if effective_maxretries < 0:
                effective_maxretries = len(uris) - 1

            local_state_unchanged = True
            while (1):
                if uris:
                    dosyncuri = uris.pop()
                elif maxretries < 0 or retries > maxretries:
                    writemsg("!!! Exhausted addresses for %s\n" %
                             _unicode_decode(hostname),
                             noiselevel=-1)
                    return (1, False)
                else:
                    uris.extend(uris_orig)
                    dosyncuri = uris.pop()

                if (retries == 0):
                    if "--ask" in opts:
                        uq = UserQuery(opts)
                        if uq.query("Do you want to sync your Portage tree " + \
                         "with the mirror at\n" + blue(dosyncuri) + bold("?"),
                         enter_invalid) == "No":
                            print()
                            print("Quitting.")
                            print()
                            sys.exit(128 + signal.SIGINT)
                    self.logger(self.xterm_titles,
                                ">>> Starting rsync with " + dosyncuri)
                    if "--quiet" not in opts:
                        print(">>> Starting rsync with " + dosyncuri + "...")
                else:
                    self.logger(self.xterm_titles,
                     ">>> Starting retry %d of %d with %s" % \
                      (retries, effective_maxretries, dosyncuri))
                    writemsg_stdout(
                     "\n\n>>> Starting retry %d of %d with %s\n" % \
                     (retries, effective_maxretries, dosyncuri), noiselevel=-1)

                if dosyncuri.startswith('ssh://'):
                    dosyncuri = dosyncuri[6:].replace('/', ':/', 1)

                unchanged, is_synced, exitcode, updatecache_flg = self._do_rsync(
                    dosyncuri, timestamp, opts)
                if not unchanged:
                    local_state_unchanged = False
                if is_synced:
                    break

                retries = retries + 1

                if maxretries < 0 or retries <= maxretries:
                    print(">>> Retrying...")
                else:
                    # over retries
                    # exit loop
                    exitcode = EXCEEDED_MAX_RETRIES
                    break
            self._process_exitcode(exitcode, dosyncuri, out, maxretries)

            # if synced successfully, verify now
            if exitcode == 0 and self.verify_metamanifest:
                if gemato is None:
                    writemsg_level(
                        "!!! Unable to verify: gemato-11.0+ is required\n",
                        level=logging.ERROR,
                        noiselevel=-1)
                    exitcode = 127
                else:
                    try:
                        # we always verify the Manifest signature, in case
                        # we had to deal with key revocation case
                        m = gemato.recursiveloader.ManifestRecursiveLoader(
                            os.path.join(self.repo.location, 'Manifest'),
                            verify_openpgp=True,
                            openpgp_env=openpgp_env,
                            max_jobs=self.verify_jobs)
                        if not m.openpgp_signed:
                            raise RuntimeError(
                                'OpenPGP signature not found on Manifest')

                        ts = m.find_timestamp()
                        if ts is None:
                            raise RuntimeError(
                                'Timestamp not found in Manifest')
                        if (self.max_age != 0
                                and (datetime.datetime.utcnow() - ts.ts).days >
                                self.max_age):
                            out.ewarn(
                                'Manifest is over %d days old, this is suspicious!'
                                % (self.max_age, ))
                            out.ewarn(
                                'You may want to try using another mirror and/or reporting this one:'
                            )
                            out.ewarn('  %s' % (dosyncuri, ))
                            out.ewarn('')

                        out.einfo('Manifest timestamp: %s UTC' % (ts.ts, ))
                        out.einfo('Valid OpenPGP signature found:')
                        out.einfo(
                            '- primary key: %s' %
                            (m.openpgp_signature.primary_key_fingerprint))
                        out.einfo('- subkey: %s' %
                                  (m.openpgp_signature.fingerprint))
                        out.einfo('- timestamp: %s UTC' %
                                  (m.openpgp_signature.timestamp))

                        # if nothing has changed, skip the actual Manifest
                        # verification
                        if not local_state_unchanged:
                            out.ebegin('Verifying %s' % (self.repo.location, ))
                            m.assert_directory_verifies()
                            out.eend(0)
                    except GematoException as e:
                        writemsg_level(
                            "!!! Manifest verification failed:\n%s\n" % (e, ),
                            level=logging.ERROR,
                            noiselevel=-1)
                        exitcode = 1

            return (exitcode, updatecache_flg)
        finally:
            if openpgp_env is not None:
                openpgp_env.close()
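The sync-uri parsing above leans on re.split with capturing groups, which returns the captured pieces interleaved with the surrounding text, so the [1:5] slice yields exactly protocol, user, host and port. A small demonstration with a made-up URI:

import re

pattern = r"(rsync|ssh)://([^:/]+@)?(\[[:\da-fA-F]*\]|[^:/]*)(:[0-9]+)?"
parts = re.split(pattern, "rsync://user@example.org:873/gentoo-portage",
                 maxsplit=4)[1:5]
# parts == ['rsync', 'user@', 'example.org', ':873']; a group that does not
# participate comes back as None, which the code above normalizes to "".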
Example #23
def _create_use_string(conf, name, cur_iuse, iuse_forced, cur_use, old_iuse,
                       old_use, is_new, feature_flags, reinst_flags):

    if not conf.print_use_string:
        return ""

    enabled = []
    if conf.alphabetical:
        disabled = enabled
        removed = enabled
    else:
        disabled = []
        removed = []
    cur_iuse = set(cur_iuse)
    enabled_flags = cur_iuse.intersection(cur_use)
    removed_iuse = set(old_iuse).difference(cur_iuse)
    any_iuse = cur_iuse.union(old_iuse)
    any_iuse = list(any_iuse)
    any_iuse.sort()

    for flag in any_iuse:
        flag_str = None
        isEnabled = False
        reinst_flag = reinst_flags and flag in reinst_flags
        if flag in enabled_flags:
            isEnabled = True
            if is_new or flag in old_use and \
             (conf.all_flags or reinst_flag):
                flag_str = red(flag)
            elif flag not in old_iuse:
                flag_str = yellow(flag) + "%*"
            elif flag not in old_use:
                flag_str = green(flag) + "*"
        elif flag in removed_iuse:
            if conf.all_flags or reinst_flag:
                flag_str = yellow("-" + flag) + "%"
                if flag in old_use:
                    flag_str += "*"
                flag_str = "(" + flag_str + ")"
                removed.append(flag_str)
            continue
        else:
            if is_new or flag in old_iuse and \
             flag not in old_use and \
             (conf.all_flags or reinst_flag):
                flag_str = blue("-" + flag)
            elif flag not in old_iuse:
                flag_str = yellow("-" + flag)
                if flag not in iuse_forced:
                    flag_str += "%"
            elif flag in old_use:
                flag_str = green("-" + flag) + "*"
        if flag_str:
            if flag in feature_flags:
                flag_str = "{" + flag_str + "}"
            elif flag in iuse_forced:
                flag_str = "(" + flag_str + ")"
            if isEnabled:
                enabled.append(flag_str)
            else:
                disabled.append(flag_str)

    if conf.alphabetical:
        ret = " ".join(enabled)
    else:
        ret = " ".join(enabled + disabled + removed)
    if ret:
        ret = '%s="%s" ' % (name, ret)
    return ret
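Read off the branches above, the decorations compose into this rough legend (colors per portage.output; a sketch, not exhaustive):

    # red(flag)            enabled (new install, or forced display of a reinstall)
    # green(flag) + "*"    enabled now, was disabled in the previous build
    # yellow(flag) + "%*"  enabled flag that is new to IUSE
    # blue(-flag)          disabled, unchanged
    # green(-flag) + "*"   disabled now, was enabled before
    # yellow(-flag) + "%"  disabled flag that is new to IUSE (unless forced)
    # (flag)               forced via iuse_forced, or a flag removed from IUSE
    # {flag}               driven by FEATURES (feature_flags) rather than USE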
Example #24
def main(settings=None, logger=None):
	"""Main program operation method....

	@param settings: dict.  defaults to settings.DEFAULTS
	@param logger: python logging module defaults to init_logger(settings)
	@return boolean  success/failure
	"""
	if settings is None:
		print("NO Input settings, using defaults...")
		settings = DEFAULTS.copy()

	if logger is None:
		logger = init_logger(settings)

	_libs_to_check = settings['library']

	if not settings['stdout'].isatty() or settings['nocolor']:
		nocolor()

	logger.warning(blue(' * ') +
		yellow('This is the new python coded version'))
	logger.warning(blue(' * ') +
		yellow('Please report any bugs found using it.'))
	logger.warning(blue(' * ') +
		yellow('The original revdep-rebuild script is '
			'installed as revdep-rebuild.sh'))
	logger.warning(blue(' * ') +
		yellow('Please file bugs at: '
			'https://bugs.gentoo.org/'))

	if os.getuid() != 0 and not settings['PRETEND']:
		logger.warning(blue(' * ') +
			yellow('You are not root, adding --pretend to portage options'))
		settings['PRETEND'] = True

	logger.debug("\tmain(), _libs_to_check = %s" % str(_libs_to_check))

	if settings['USE_TMP_FILES'] \
			and check_temp_files(settings['DEFAULT_TMP_DIR'], logger=logger):
		libraries, la_libraries, libraries_links, binaries = read_cache(
			settings['DEFAULT_TMP_DIR'])
		assigned, orphaned = analyse(
			settings=settings,
			logger=logger,
			libraries=libraries,
			la_libraries=la_libraries,
			libraries_links=libraries_links,
			binaries=binaries,
			_libs_to_check=_libs_to_check)
	else:
		assigned, orphaned = analyse(settings, logger, _libs_to_check=_libs_to_check)

	if not assigned and not orphaned:
		logger.warning('\n' + bold('Your system is consistent'))
		# return the correct exit code
		return 0
	elif orphaned:
		# blank line for better visibility of the following lines
		logger.warning('')
		if settings['library']:
			logger.warning(red(' !!! Dependent orphaned files: ') +
				bold('No installed package was found for the following:'))
		else:
			logger.warning(red(' !!! Broken orphaned files: ') +
				bold('No installed package was found for the following:'))
		for filename in orphaned:
			logger.warning(red('\t* ') + filename)

	success = rebuild(logger, assigned, settings)
	logger.debug("rebuild return code = %i" %success)
	return success
Example #25
    def swig_generate(self):
        "build all available modules"

        def quote(s):
            return '"' + s + '"'

        def win_quote(s):
            if sys.platform == 'win32':
                return '"' + s + '"'
            return s

        for module in self.MODULES:
            module_name = self.MODULES[module][0]
            config_cmd = self.MODULES[module][1]
            module_pkg_name = self.MODULES[module][2]
            mod_hack_name = self.MODULES[module][3]
            mod_out_prefix = module_pkg_name.replace('.', os.sep) + module

            if sys.platform == "_win32":
                INCLUDE_DIR = os.path.join(os.getenv("COINDIR"), "include")
                CPP_FLAGS = "-I" + quote(INCLUDE_DIR) + " " + \
                            "-I" + quote(os.path.join(os.getenv("COINDIR"), "include", "Inventor", "annex")) + \
                            " /DCOIN_DLL /wd4244 /wd4049"
                # acquire highest non-debug Coin library version
                try:
                    LDFLAGS_LIBS = quote(
                        max(
                            glob.glob(
                                os.path.join(os.getenv("COINDIR"), "lib",
                                             "coin?.lib")))) + " "
                # with cmake the coin library is named Coin4.lib
                except ValueError:
                    LDFLAGS_LIBS = quote(
                        max(
                            glob.glob(
                                os.path.join(os.getenv("COINDIR"), "lib",
                                             "Coin?.lib")))) + " "

                if module == "sowin":
                    CPP_FLAGS += " /DSOWIN_DLL"
                    LDFLAGS_LIBS += quote(
                        os.path.join(os.getenv("COINDIR"), "lib",
                                     "sowin1.lib"))
                elif module == "soqt":
                    CPP_FLAGS += " -I" + '"' + os.getenv(
                        "QTDIR") + "\\include\"  /DSOQT_DLL"
                    if os.path.isdir(os.getenv("QTDIR") + "\\include\Qt\""):
                        CPP_FLAGS += " -I" + '"' + os.getenv(
                            "QTDIR") + "\\include\Qt\""
                        LDFLAGS_LIBS += os.path.join(os.getenv("COINDIR"),
                                                     "lib", "soqt1.lib") + " "
                    else:
                        # workaround for conda qt4:
                        CPP_FLAGS += " -I" + '"' + os.getenv(
                            "QTDIR") + "\\include\qt\Qt\""
                        CPP_FLAGS += " -I" + '"' + os.getenv(
                            "QTDIR") + "\\include\qt\""
                        LDFLAGS_LIBS += os.path.join(os.getenv("COINDIR"),
                                                     "lib", "SoQt.lib") + " "
            else:
                INCLUDE_DIR = self.cmake_config_dict[config_cmd +
                                                     '_INCLUDE_DIR']
                LIB_DIR = self.cmake_config_dict[config_cmd + '_LIB_DIR']
                CPP_FLAGS = ' -I' + win_quote(INCLUDE_DIR)
                CPP_FLAGS += ' -I' + win_quote(
                    os.path.join(INCLUDE_DIR, 'Inventor', 'annex'))
                if sys.platform == 'win32':
                    CPP_FLAGS += " /DCOIN_DLL /wd4244 /wd4049"
                    LDFLAGS_LIBS = quote(
                        max(glob.glob(os.path.join(LIB_DIR,
                                                   "Coin?.lib")))) + " "
                else:
                    CPP_FLAGS += " -Wno-unused -Wno-maybe-uninitialized"
                    LDFLAGS_LIBS = ' -L' + self.cmake_config_dict[config_cmd +
                                                                  '_LIB_DIR']

                if module == "soqt":
                    CPP_FLAGS += ' -I' + win_quote(
                        self.QTINFO.getHeadersPath())
                    CPP_FLAGS += ' -I' + win_quote(
                        os.path.join(self.QTINFO.getHeadersPath(), 'QtCore'))
                    CPP_FLAGS += ' -I' + win_quote(
                        os.path.join(self.QTINFO.getHeadersPath(), 'QtGui'))
                    CPP_FLAGS += ' -I' + win_quote(
                        os.path.join(self.QTINFO.getHeadersPath(), 'QtOpenGL'))
                    CPP_FLAGS += ' -I' + win_quote(
                        os.path.join(self.QTINFO.getHeadersPath(),
                                     'QtWidgets'))
                    if sys.platform == 'win32':
                        LDFLAGS_LIBS += " " + quote(
                            max(glob.glob(os.path.join(LIB_DIR,
                                                       "SoQt?.lib")))) + " "
                        CPP_FLAGS += " /DSOQT_DLL"
                    else:
                        LDFLAGS_LIBS += ' -lSoQt'

                if module == "coin":
                    if sys.platform == 'win32':
                        pass
                    else:
                        LDFLAGS_LIBS += ' -lCoin'

            if not os.path.isfile(mod_out_prefix + "_wrap.cpp"):
                print(
                    red("\n=== Generating %s_wrap.cpp for %s ===\n" %
                        (mod_out_prefix, module)))
                print(
                    blue(self.SWIG + " " + self.SWIG_SUPPRESS_WARNINGS + " " +
                         self.SWIG_PARAMS %
                         (INCLUDE_DIR, self.CXX_INCS, mod_out_prefix, module)))
                if os.system(self.SWIG + " " + self.SWIG_SUPPRESS_WARNINGS +
                             " " + self.SWIG_PARAMS %
                             (INCLUDE_DIR, self.CXX_INCS, mod_out_prefix,
                              mod_hack_name)):
                    print(
                        red("SWIG did not generate wrappers successfully! ** Aborting **"
                            ))
                    sys.exit(1)
            else:
                print(
                    red("=== %s_wrap.cpp for %s already exists! ===" %
                        (mod_out_prefix, module_pkg_name + module)))

            self.ext_modules.append(
                Extension(
                    module_name, [mod_out_prefix + "_wrap.cpp"],
                    extra_compile_args=(self.CXX_INCS + CPP_FLAGS).split(),
                    extra_link_args=(self.CXX_LIBS + LDFLAGS_LIBS).split()))
Example #26
    def update(self):
        '''Internal update function which performs the transfer'''
        opts = self.options.get('emerge_config').opts
        self.usersync_uid = self.options.get('usersync_uid', None)
        enter_invalid = '--ask-enter-invalid' in opts
        out = portage.output.EOutput()
        syncuri = self.repo.sync_uri
        vcs_dirs = frozenset(VCS_DIRS)
        vcs_dirs = vcs_dirs.intersection(os.listdir(self.repo.location))

        for vcs_dir in vcs_dirs:
            writemsg_level(("!!! %s appears to be under revision " + \
             "control (contains %s).\n!!! Aborting rsync sync.\n") % \
             (self.repo.location, vcs_dir), level=logging.ERROR, noiselevel=-1)
            return (1, False)
        self.timeout = 180

        rsync_opts = []
        if self.settings["PORTAGE_RSYNC_OPTS"] == "":
            rsync_opts = self._set_rsync_defaults()
        else:
            rsync_opts = self._validate_rsync_opts(rsync_opts, syncuri)
        self.rsync_opts = self._rsync_opts_extend(opts, rsync_opts)

        self.extra_rsync_opts = list()
        if self.repo.module_specific_options.get('sync-rsync-extra-opts'):
            self.extra_rsync_opts.extend(
                portage.util.shlex_split(
                    self.repo.module_specific_options['sync-rsync-extra-opts'])
            )

        # Real local timestamp file.
        self.servertimestampfile = os.path.join(self.repo.location, "metadata",
                                                "timestamp.chk")

        content = portage.util.grabfile(self.servertimestampfile)
        timestamp = 0
        if content:
            try:
                timestamp = time.mktime(
                    time.strptime(content[0], TIMESTAMP_FORMAT))
            except (OverflowError, ValueError):
                pass
        del content

        try:
            self.rsync_initial_timeout = \
             int(self.settings.get("PORTAGE_RSYNC_INITIAL_TIMEOUT", "15"))
        except ValueError:
            self.rsync_initial_timeout = 15

        try:
            maxretries = int(self.settings["PORTAGE_RSYNC_RETRIES"])
        except SystemExit as e:
            raise  # Needed else can't exit
        except:
            maxretries = -1  #default number of retries

        if syncuri.startswith("file://"):
            self.proto = "file"
            dosyncuri = syncuri[7:]
            is_synced, exitcode = self._do_rsync(dosyncuri, timestamp, opts)
            self._process_exitcode(exitcode, dosyncuri, out, 1)
            return (exitcode, exitcode == os.EX_OK)

        retries = 0
        try:
            self.proto, user_name, hostname, port = re.split(
                r"(rsync|ssh)://([^:/]+@)?(\[[:\da-fA-F]*\]|[^:/]*)(:[0-9]+)?",
                syncuri,
                maxsplit=4)[1:5]
        except ValueError:
            writemsg_level("!!! sync-uri is invalid: %s\n" % syncuri,
                           noiselevel=-1,
                           level=logging.ERROR)
            return (1, False)

        self.ssh_opts = self.settings.get("PORTAGE_SSH_OPTS")

        if port is None:
            port = ""
        if user_name is None:
            user_name = ""
        if re.match(r"^\[[:\da-fA-F]*\]$", hostname) is None:
            getaddrinfo_host = hostname
        else:
            # getaddrinfo needs the brackets stripped
            getaddrinfo_host = hostname[1:-1]
        updatecache_flg = True
        all_rsync_opts = set(self.rsync_opts)
        all_rsync_opts.update(self.extra_rsync_opts)

        family = socket.AF_UNSPEC
        if "-4" in all_rsync_opts or "--ipv4" in all_rsync_opts:
            family = socket.AF_INET
        elif socket.has_ipv6 and \
         ("-6" in all_rsync_opts or "--ipv6" in all_rsync_opts):
            family = socket.AF_INET6

        addrinfos = None
        uris = []

        try:
            addrinfos = getaddrinfo_validate(
                socket.getaddrinfo(getaddrinfo_host, None, family,
                                   socket.SOCK_STREAM))
        except socket.error as e:
            writemsg_level("!!! getaddrinfo failed for '%s': %s\n" %
                           (_unicode_decode(hostname), _unicode(e)),
                           noiselevel=-1,
                           level=logging.ERROR)

        if addrinfos:

            AF_INET = socket.AF_INET
            AF_INET6 = None
            if socket.has_ipv6:
                AF_INET6 = socket.AF_INET6

            ips_v4 = []
            ips_v6 = []

            for addrinfo in addrinfos:
                if addrinfo[0] == AF_INET:
                    ips_v4.append("%s" % addrinfo[4][0])
                elif AF_INET6 is not None and addrinfo[0] == AF_INET6:
                    # IPv6 addresses need to be enclosed in square brackets
                    ips_v6.append("[%s]" % addrinfo[4][0])

            random.shuffle(ips_v4)
            random.shuffle(ips_v6)

            # Give priority to the address family that
            # getaddrinfo() returned first.
            if AF_INET6 is not None and addrinfos and \
             addrinfos[0][0] == AF_INET6:
                ips = ips_v6 + ips_v4
            else:
                ips = ips_v4 + ips_v6

            for ip in ips:
                uris.append(
                    syncuri.replace("//" + user_name + hostname + port + "/",
                                    "//" + user_name + ip + port + "/", 1))

        if not uris:
            # With some configurations we need to use the plain hostname
            # rather than try to resolve the ip addresses (bug #340817).
            uris.append(syncuri)

        # reverse, for use with pop()
        uris.reverse()

        effective_maxretries = maxretries
        if effective_maxretries < 0:
            effective_maxretries = len(uris) - 1

        while (1):
            if uris:
                dosyncuri = uris.pop()
            else:
                writemsg("!!! Exhausted addresses for %s\n" %
                         _unicode_decode(hostname),
                         noiselevel=-1)
                return (1, False)

            if (retries == 0):
                if "--ask" in opts:
                    uq = UserQuery(opts)
                    if uq.query("Do you want to sync your Portage tree " + \
                     "with the mirror at\n" + blue(dosyncuri) + bold("?"),
                     enter_invalid) == "No":
                        print()
                        print("Quitting.")
                        print()
                        sys.exit(128 + signal.SIGINT)
                self.logger(self.xterm_titles,
                            ">>> Starting rsync with " + dosyncuri)
                if "--quiet" not in opts:
                    print(">>> Starting rsync with " + dosyncuri + "...")
            else:
                self.logger(self.xterm_titles,
                 ">>> Starting retry %d of %d with %s" % \
                  (retries, effective_maxretries, dosyncuri))
                writemsg_stdout(
                 "\n\n>>> Starting retry %d of %d with %s\n" % \
                 (retries, effective_maxretries, dosyncuri), noiselevel=-1)

            if dosyncuri.startswith('ssh://'):
                dosyncuri = dosyncuri[6:].replace('/', ':/', 1)

            is_synced, exitcode = self._do_rsync(dosyncuri, timestamp, opts)
            if is_synced:
                break

            retries = retries + 1

            if maxretries < 0 or retries <= maxretries:
                print(">>> Retrying...")
            else:
                # over retries
                # exit loop
                updatecache_flg = False
                exitcode = EXCEEDED_MAX_RETRIES
                break
        self._process_exitcode(exitcode, dosyncuri, out, maxretries)
        return (exitcode, updatecache_flg)
Example #27
    def check_with_cmake(self):
        dirname = os.path.dirname(__file__)
        cmake_command = ['cmake', dirname]
        try:
            cmake_command += ['-G', os.environ['GENERATOR']]
        except KeyError:
            pass
        print(yellow('calling: ' + cmake_command[0] + ' ' + cmake_command[1]))
        cmake = subprocess.Popen(cmake_command,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE)
        cmake_out, cmake_err = cmake.communicate()
        coin_vars = [
            'COIN_FOUND', 'COIN_VERSION', 'COIN_INCLUDE_DIR', 'COIN_LIB_DIR'
        ]
        soqt_vars = [
            'SOQT_FOUND', 'SOQT_VERSION', 'SOQT_INCLUDE_DIR', 'SOQT_LIB_DIR'
        ]
        config_dict = {}
        print(yellow(cmake_out.decode("utf-8")))
        print(red(cmake_err.decode("utf-8")))
        if cmake.returncode == 0:
            for line in cmake_out.decode("utf-8").split("\n"):
                for var in coin_vars + soqt_vars:
                    if var in line:
                        line = line.replace('-- ' + var, '')
                        line = line.replace(': ', '').replace('\n', '')
                        if "INCLUDE" in var:
                            line = line.replace(';', '\" -I\"')
                        config_dict[var] = line

        print(yellow('\nchecking for COIN via cmake'))
        for key in coin_vars:
            if key in config_dict:
                print(blue(key + ': ' + config_dict[key]))

        print(yellow('\nchecking for SOQT via cmake'))
        for key in soqt_vars:
            if key in config_dict:
                print(blue(key + ': ' + config_dict[key]))

        if config_dict.get('SOQT_FOUND', 'false') == 'false':
            pivy_build.MODULES.pop('soqt')
            print(red("\ndisable soqt, because cmake couldn't find it"))
        else:
            try:
                import qtinfo
                self.QTINFO = qtinfo.QtInfo()
            except Exception:
                import traceback
                print(
                    red("\ndisabling soqt, because there was a problem "
                        "running qtinfo (it needs qmake)"))
                print(red("-" * 60))
                print(red(traceback.format_exc()))
                print(red("-" * 60))
                pivy_build.MODULES.pop('soqt')

        self.cmake_config_dict = config_dict
        if self.cmake_config_dict.get('COIN_FOUND', 'false') == 'false':
            raise RuntimeError(
                'coin was not found, but you need coin to build pivy')
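As an aside, the "-- VAR: value" scraping above can be exercised standalone; the cmake output below is fabricated for illustration:

sample_out = "-- COIN_FOUND: true\n-- COIN_VERSION: 4.0.0\n-- other noise\n"
wanted = ['COIN_FOUND', 'COIN_VERSION', 'COIN_INCLUDE_DIR', 'COIN_LIB_DIR']
config_dict = {}
for line in sample_out.split("\n"):
    for var in wanted:
        if var in line:
            config_dict[var] = (line.replace('-- ' + var, '')
                                    .replace(': ', '').replace('\n', ''))
print(config_dict)  # {'COIN_FOUND': 'true', 'COIN_VERSION': '4.0.0'}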
Example #28
0
def main(settings=None, logger=None):
	"""Main program operation method....
	
	@param settings: dict.  defaults to settings.DEFAULTS
	@param logger: python logging module defaults to init_logger(settings)
	@return boolean  success/failure
	"""

	if settings is None:
		print("NO Input settings, using defaults...")
		settings = DEFAULTS.copy()

	if logger is None:
		logger = init_logger(settings)

	_libs_to_check = settings['library']

	if not settings['stdout'].isatty() or settings['nocolor']:
		nocolor()

	#TODO: Development warning
	logger.warn(blue(' * ') + 
		yellow('This is a development version, '
			'so it may not work correctly'))
	logger.warn(blue(' * ') + 
		yellow('The original revdep-rebuild script is '
			'installed as revdep-rebuild.sh'))

	if os.getuid() != 0 and not settings['PRETEND']:
		logger.warn(blue(' * ') + 
			yellow('You are not root, adding --pretend to portage options'))
		settings['PRETEND'] = True

	if settings['library']:
		logger.warn(green(' * ') + 
			"Looking for libraries: %s" % (bold(', '.join(settings['library']))))

	if settings['USE_TMP_FILES'] \
			and check_temp_files(settings['DEFAULT_TMP_DIR'], logger=logger):
		libraries, la_libraries, libraries_links, binaries = read_cache(
			settings['DEFAULT_TMP_DIR'])
		assigned = analyse(
			settings=settings,
			logger=logger,
			libraries=libraries,
			la_libraries=la_libraries, 
			libraries_links=libraries_links,
			binaries=binaries,
			_libs_to_check=_libs_to_check)
	else:
		assigned = analyse(settings, logger, _libs_to_check=_libs_to_check)

	if not assigned:
		logger.warn('\n' + bold('Your system is consistent'))
		# return the correct exit code
		return 0

	has_masked = False
	tmp = []
	for ebuild in assigned:
		if get_masking_status(ebuild):
			has_masked = True
			logger.warn('!!! ' + red('All ebuilds that could satisfy: ') + 
				green(ebuild) + red(' have been masked'))
		else:
			tmp.append(ebuild)
	assigned = tmp

	if has_masked:
		logger.info(red(' * ') + 
			'Unmask all ebuild(s) listed above and call revdep-rebuild '
			'again, or manually emerge the given packages.')

	success = rebuild(logger, assigned, settings)
	logger.debug("rebuild return code = %i" %success)
	return success
Example #29
0
def useflag(string, enabled=True):
	"""Returns a USE flag string."""
	return output.red(string) if enabled else output.blue(string)
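A minimal usage sketch (assuming gentoolkit's output color helpers are importable; the flag name is illustrative):

useflag('ssl')         # rendered red: flag is enabled
useflag('ssl', False)  # rendered blue: flag is disabled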
Example #30
0
 def check_python_version(self):
     "check the Python version"
     print blue("Python version...%s" % sys.version.split(" ")[0])
     if int(sys.version[0]) < 2:
         print red("Pivy only works with Python versions >= 2.0.")
         sys.exit(1)
     sys.stdout.flush()


def formatSize(x):
    x = int(x)
    if x < 1024:
        return str(x) + ' B'
    if x < 1024 * 1024:
        return '%.2f KB' % (float(x) / 1024)
    return '%.2f MB' % (float(x) / (1024.0 * 1024.0))
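# A few illustrative checks of formatSize() (not part of the original script):
#   formatSize(512)      -> '512 B'
#   formatSize(2048)     -> '2.00 KB'
#   formatSize(5 << 20)  -> '5.00 MB'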


packages = {}
total_size = 0

print output.blue('Counting...  ')

counter = 0
for category in sorted(os.listdir('/var/db/pkg/')):

    print output.green(' *'), category, ' ',
    update_spinner()

    for package in sorted(os.listdir('/var/db/pkg/' + category)):

        fh = file('/var/db/pkg/' + category + '/' + package + '/CONTENTS')
        content = fh.readlines()
        fh.close()

        size = 0
        for entity in content:
Example #32
0
def useflag(string, enabled=True):
    """Returns a USE flag string."""
    return output.red(string) if enabled else output.blue(string)
Example #33
0
def analyse(
    settings, logger, libraries=None, la_libraries=None, libraries_links=None, binaries=None, _libs_to_check=set()
):
    """Main program body.  It will collect all info and determine the
	pkgs needing rebuilding.

	@param logger: logger used for logging messages, instance of logging.Logger
				   class. Can be logging (RootLogger).
	@param _libs_to_check Libraries that need to be checked only
	@rtype list: list of pkgs that need rebuilding
	"""

    if libraries and la_libraries and libraries_links and binaries:
        logger.info(blue(" * ") + bold("Found a valid cache, skipping collecting phase"))
    else:
        # TODO: add partial cache (for ex. only libraries)
        # when found for some reason

        logger.warn(green(" * ") + bold("Collecting system binaries and libraries"))
        bin_dirs, lib_dirs = prepare_search_dirs(logger, settings)

        masked_dirs, masked_files, ld = parse_revdep_config(settings["REVDEP_CONFDIR"])
        lib_dirs.update(ld)
        bin_dirs.update(ld)
        masked_dirs.update(["/lib/modules", "/lib32/modules", "/lib64/modules"])

        logger.info(green(" * ") + bold("Collecting dynamic linking informations"))
        libraries, la_libraries, libraries_links, symlink_pairs = collect_libraries_from_dir(
            lib_dirs, masked_dirs, logger
        )
        binaries = collect_binaries_from_dir(bin_dirs, masked_dirs, logger)

        if settings["USE_TMP_FILES"]:
            save_cache(
                logger=logger,
                to_save={
                    "libraries": libraries,
                    "la_libraries": la_libraries,
                    "libraries_links": libraries_links,
                    "binaries": binaries,
                },
                temp_path=settings["DEFAULT_TMP_DIR"],
            )

    logger.debug(
        "Found "
        + str(len(libraries))
        + " libraries (+"
        + str(len(libraries_links))
        + " symlinks) and "
        + str(len(binaries))
        + " binaries"
    )

    logger.warn(green(" * ") + bold("Checking dynamic linking consistency"))
    logger.debug(
        "Search for " + str(len(binaries) + len(libraries)) + " within " + str(len(libraries) + len(libraries_links))
    )
    libs_and_bins = libraries + binaries

    found_libs = []
    dependencies = []

    if _libs_to_check:
        nltc = []
        for ltc in _libs_to_check:
            if os.path.isfile(ltc):
                ltc = scan(["-nBSF", "%S"], [ltc], settings["CMD_MAX_ARGS"])[0].split()[0]
            nltc += [ltc]
        _libs_to_check = nltc

    _bits, linkg = platform.architecture()
    if _bits.startswith("32"):
        bits = 32
    elif _bits.startswith("64"):
        bits = 64

    broken = []
    for av_bits in glob.glob("/lib[0-9]*") or ("/lib32",):
        bits = int(av_bits[4:])

        _libraries = libraries + libraries_links

        found_libs, dependencies = prepare_checks(libs_and_bins, _libraries, bits, settings["CMD_MAX_ARGS"])
        broken = find_broken(found_libs, _libraries, _libs_to_check)

        bits /= 2
        bits = int(bits)

    broken_la = extract_dependencies_from_la(la_libraries, libraries + libraries_links, _libs_to_check, logger)

    broken_pathes = main_checks(found_libs, broken, dependencies, logger)
    broken_pathes += broken_la

    logger.warn(green(" * ") + bold("Assign files to packages"))

    return assign_packages(broken_pathes, logger, settings)
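The /lib[0-9]* loop above derives the word size directly from the directory name; a tiny standalone illustration:

import glob

for av_bits in glob.glob("/lib[0-9]*") or ("/lib32",):
    bits = int(av_bits[4:])   # "/lib64" -> 64, "/lib32" -> 32
    print(av_bits, "->", bits)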
Example #34
0
 def check_python_version(self):
     "check the Python version"
     print(blue("Python version...%s" % sys.version.split(" ")[0]))
     if int(sys.version[0]) < 2:
         print(red("Pivy only works with Python versions >= 2.0."))
         sys.exit(1)
Example #35
0
    def update(self):
        """Internal update function which performs the transfer"""
        opts = self.options.get("emerge_config").opts
        self.usersync_uid = self.options.get("usersync_uid", None)
        enter_invalid = "--ask-enter-invalid" in opts
        quiet = "--quiet" in opts
        out = portage.output.EOutput(quiet=quiet)
        syncuri = self.repo.sync_uri
        if self.repo.module_specific_options.get("sync-rsync-vcs-ignore",
                                                 "false").lower() in ("true",
                                                                      "yes"):
            vcs_dirs = ()
        else:
            vcs_dirs = frozenset(VCS_DIRS)
            vcs_dirs = vcs_dirs.intersection(os.listdir(self.repo.location))

        for vcs_dir in vcs_dirs:
            writemsg_level(
                ("!!! %s appears to be under revision " +
                 "control (contains %s).\n!!! Aborting rsync sync "
                 '(override with "sync-rsync-vcs-ignore = true" in repos.conf).\n'
                 ) % (self.repo.location, vcs_dir),
                level=logging.ERROR,
                noiselevel=-1,
            )
            return (1, False)
        self.timeout = 180

        rsync_opts = []
        if self.settings["PORTAGE_RSYNC_OPTS"] == "":
            rsync_opts = self._set_rsync_defaults()
        else:
            rsync_opts = self._validate_rsync_opts(rsync_opts, syncuri)
        self.rsync_opts = self._rsync_opts_extend(opts, rsync_opts)

        self.extra_rsync_opts = list()
        if self.repo.module_specific_options.get("sync-rsync-extra-opts"):
            self.extra_rsync_opts.extend(
                portage.util.shlex_split(
                    self.repo.module_specific_options["sync-rsync-extra-opts"])
            )

        exitcode = 0
        verify_failure = False

        # Process GLEP74 verification options.
        # Default verification to 'no'; it's enabled for ::gentoo
        # via default repos.conf though.
        self.verify_metamanifest = self.repo.module_specific_options.get(
            "sync-rsync-verify-metamanifest", "no").lower() in ("yes", "true")
        # Support overriding job count.
        self.verify_jobs = self.repo.module_specific_options.get(
            "sync-rsync-verify-jobs", None)
        if self.verify_jobs is not None:
            try:
                self.verify_jobs = int(self.verify_jobs)
                if self.verify_jobs < 0:
                    raise ValueError(self.verify_jobs)
            except ValueError:
                writemsg_level(
                    "!!! sync-rsync-verify-jobs not a positive integer: %s\n" %
                    (self.verify_jobs, ),
                    level=logging.WARNING,
                    noiselevel=-1,
                )
                self.verify_jobs = None
            else:
                if self.verify_jobs == 0:
                    # Use the apparent number of processors if gemato
                    # supports it.
                    self.verify_jobs = None
        # Support overriding max age.
        self.max_age = self.repo.module_specific_options.get(
            "sync-rsync-verify-max-age", "")
        if self.max_age:
            try:
                self.max_age = int(self.max_age)
                if self.max_age < 0:
                    raise ValueError(self.max_age)
            except ValueError:
                writemsg_level(
                    "!!! sync-rsync-max-age must be a non-negative integer: %s\n"
                    % (self.max_age, ),
                    level=logging.WARNING,
                    noiselevel=-1,
                )
                self.max_age = 0
        else:
            self.max_age = 0

        openpgp_env = None
        if self.verify_metamanifest and gemato is not None:
            # Use isolated environment if key is specified,
            # system environment otherwise
            openpgp_env = self._get_openpgp_env(
                self.repo.sync_openpgp_key_path)

        try:
            # Load and update the keyring early. If it fails, then verification
            # will not be performed and the user will have to fix it and try again,
            # so we may as well bail out before actual rsync happens.
            if openpgp_env is not None and self.repo.sync_openpgp_key_path is not None:
                try:
                    out.einfo("Using keys from %s" %
                              (self.repo.sync_openpgp_key_path, ))
                    with io.open(self.repo.sync_openpgp_key_path, "rb") as f:
                        openpgp_env.import_key(f)
                    self._refresh_keys(openpgp_env)
                except (GematoException, asyncio.TimeoutError) as e:
                    writemsg_level(
                        "!!! Manifest verification impossible due to keyring problem:\n%s\n"
                        % (e, ),
                        level=logging.ERROR,
                        noiselevel=-1,
                    )
                    return (1, False)

            # Real local timestamp file.
            self.servertimestampfile = os.path.join(self.repo.location,
                                                    "metadata",
                                                    "timestamp.chk")

            content = portage.util.grabfile(self.servertimestampfile)
            timestamp = 0
            if content:
                try:
                    timestamp = time.mktime(
                        time.strptime(content[0], TIMESTAMP_FORMAT))
                except (OverflowError, ValueError):
                    pass
            del content

            try:
                self.rsync_initial_timeout = int(
                    self.settings.get("PORTAGE_RSYNC_INITIAL_TIMEOUT", "15"))
            except ValueError:
                self.rsync_initial_timeout = 15

            try:
                maxretries = int(self.settings["PORTAGE_RSYNC_RETRIES"])
            except SystemExit as e:
                raise  # Needed else can't exit
            except:
                maxretries = -1  # default number of retries

            if syncuri.startswith("file://"):
                self.proto = "file"
                dosyncuri = syncuri[7:]
                unchanged, is_synced, exitcode, updatecache_flg = self._do_rsync(
                    dosyncuri, timestamp, opts)
                self._process_exitcode(exitcode, dosyncuri, out, 1)
                if exitcode == 0:
                    if unchanged:
                        self.repo_storage.abort_update()
                    else:
                        self.repo_storage.commit_update()
                        self.repo_storage.garbage_collection()
                return (exitcode, updatecache_flg)

            retries = 0
            try:
                self.proto, user_name, hostname, port = re.split(
                    r"(rsync|ssh)://([^:/]+@)?(\[[:\da-fA-F]*\]|[^:/]*)(:[0-9]+)?",
                    syncuri,
                    maxsplit=4,
                )[1:5]
            except ValueError:
                writemsg_level(
                    "!!! sync-uri is invalid: %s\n" % syncuri,
                    noiselevel=-1,
                    level=logging.ERROR,
                )
                return (1, False)

            self.ssh_opts = self.settings.get("PORTAGE_SSH_OPTS")

            if port is None:
                port = ""
            if user_name is None:
                user_name = ""
            if re.match(r"^\[[:\da-fA-F]*\]$", hostname) is None:
                getaddrinfo_host = hostname
            else:
                # getaddrinfo needs the brackets stripped
                getaddrinfo_host = hostname[1:-1]
            updatecache_flg = False
            all_rsync_opts = set(self.rsync_opts)
            all_rsync_opts.update(self.extra_rsync_opts)

            family = socket.AF_UNSPEC
            if "-4" in all_rsync_opts or "--ipv4" in all_rsync_opts:
                family = socket.AF_INET
            elif socket.has_ipv6 and ("-6" in all_rsync_opts
                                      or "--ipv6" in all_rsync_opts):
                family = socket.AF_INET6

            addrinfos = None
            uris = []

            if "RSYNC_PROXY" not in self.spawn_kwargs["env"]:
                try:
                    addrinfos = getaddrinfo_validate(
                        socket.getaddrinfo(getaddrinfo_host, None, family,
                                           socket.SOCK_STREAM))
                except socket.error as e:
                    writemsg_level(
                        "!!! getaddrinfo failed for '%s': %s\n" %
                        (_unicode_decode(hostname), str(e)),
                        noiselevel=-1,
                        level=logging.ERROR,
                    )

            if addrinfos:

                AF_INET = socket.AF_INET
                AF_INET6 = None
                if socket.has_ipv6:
                    AF_INET6 = socket.AF_INET6

                ips_v4 = []
                ips_v6 = []

                for addrinfo in addrinfos:
                    if addrinfo[0] == AF_INET:
                        ips_v4.append("%s" % addrinfo[4][0])
                    elif AF_INET6 is not None and addrinfo[0] == AF_INET6:
                        # IPv6 addresses need to be enclosed in square brackets
                        ips_v6.append("[%s]" % addrinfo[4][0])

                random.shuffle(ips_v4)
                random.shuffle(ips_v6)

                # Give priority to the address family that
                # getaddrinfo() returned first.
                if AF_INET6 is not None and addrinfos and addrinfos[0][
                        0] == AF_INET6:
                    ips = ips_v6 + ips_v4
                else:
                    ips = ips_v4 + ips_v6

                for ip in ips:
                    uris.append(
                        syncuri.replace(
                            "//" + user_name + hostname + port + "/",
                            "//" + user_name + ip + port + "/",
                            1,
                        ))

            if not uris:
                # With some configurations we need to use the plain hostname
                # rather than try to resolve the ip addresses (bug #340817).
                uris.append(syncuri)
            elif len(uris) == 1:
                # Use the original hostname if it resolves to a single IP,
                # since DNS lookup must occur in the rsync process for
                # compatibility with things like proxychains that allocate
                # a surrogate IP which is only valid within the current
                # process.
                uris = [syncuri]

            # reverse, for use with pop()
            uris.reverse()
            uris_orig = uris[:]

            effective_maxretries = maxretries
            if effective_maxretries < 0:
                effective_maxretries = len(uris) - 1

            local_state_unchanged = True
            while 1:
                if uris:
                    dosyncuri = uris.pop()
                elif maxretries < 0 or retries > maxretries:
                    writemsg(
                        "!!! Exhausted addresses for %s\n" %
                        _unicode_decode(hostname),
                        noiselevel=-1,
                    )
                    return (1, False)
                else:
                    uris.extend(uris_orig)
                    dosyncuri = uris.pop()

                if retries == 0:
                    if "--ask" in opts:
                        uq = UserQuery(opts)
                        if (uq.query(
                                "Do you want to sync your ebuild repository " +
                                "with the mirror at\n" + blue(dosyncuri) +
                                bold("?"),
                                enter_invalid,
                        ) == "No"):
                            print()
                            print("Quitting.")
                            print()
                            sys.exit(128 + signal.SIGINT)
                    self.logger(self.xterm_titles,
                                ">>> Starting rsync with " + dosyncuri)
                    if "--quiet" not in opts:
                        print(">>> Starting rsync with " + dosyncuri + "...")
                else:
                    self.logger(
                        self.xterm_titles,
                        ">>> Starting retry %d of %d with %s" %
                        (retries, effective_maxretries, dosyncuri),
                    )
                    writemsg_stdout(
                        "\n\n>>> Starting retry %d of %d with %s\n" %
                        (retries, effective_maxretries, dosyncuri),
                        noiselevel=-1,
                    )

                if dosyncuri.startswith("ssh://"):
                    dosyncuri = dosyncuri[6:].replace("/", ":/", 1)

                unchanged, is_synced, exitcode, updatecache_flg = self._do_rsync(
                    dosyncuri, timestamp, opts)
                if not unchanged:
                    local_state_unchanged = False
                if is_synced:
                    break

                retries = retries + 1

                if maxretries < 0 or retries <= maxretries:
                    print(">>> Retrying...")
                else:
                    # over retries
                    # exit loop
                    exitcode = EXCEEDED_MAX_RETRIES
                    break

            self._process_exitcode(exitcode, dosyncuri, out, maxretries)

            if local_state_unchanged:
                # The quarantine download_dir is not intended to exist
                # in this case, so refer gemato to the normal repository
                # location.
                download_dir = self.repo.location
            else:
                download_dir = self.download_dir

            # if synced successfully, verify now
            if exitcode == 0 and self.verify_metamanifest:
                if gemato is None:
                    writemsg_level(
                        "!!! Unable to verify: gemato-14.5+ is required\n",
                        level=logging.ERROR,
                        noiselevel=-1,
                    )
                    exitcode = 127
                else:
                    try:
                        # we always verify the Manifest signature, in case
                        # we had to deal with key revocation case
                        m = gemato.recursiveloader.ManifestRecursiveLoader(
                            os.path.join(download_dir, "Manifest"),
                            verify_openpgp=True,
                            openpgp_env=openpgp_env,
                            max_jobs=self.verify_jobs,
                        )
                        if not m.openpgp_signed:
                            raise RuntimeError(
                                "OpenPGP signature not found on Manifest")

                        ts = m.find_timestamp()
                        if ts is None:
                            raise RuntimeError(
                                "Timestamp not found in Manifest")
                        if (self.max_age != 0
                                and (datetime.datetime.utcnow() - ts.ts).days >
                                self.max_age):
                            out.quiet = False
                            out.ewarn(
                                "Manifest is over %d days old, this is suspicious!"
                                % (self.max_age, ))
                            out.ewarn(
                                "You may want to try using another mirror and/or reporting this one:"
                            )
                            out.ewarn("  %s" % (dosyncuri, ))
                            out.ewarn("")
                            out.quiet = quiet

                        out.einfo("Manifest timestamp: %s UTC" % (ts.ts, ))
                        out.einfo("Valid OpenPGP signature found:")
                        out.einfo(
                            "- primary key: %s" %
                            (m.openpgp_signature.primary_key_fingerprint))
                        out.einfo("- subkey: %s" %
                                  (m.openpgp_signature.fingerprint))
                        out.einfo("- timestamp: %s UTC" %
                                  (m.openpgp_signature.timestamp))

                        # if nothing has changed, skip the actual Manifest
                        # verification
                        if not local_state_unchanged:
                            out.ebegin("Verifying %s" % (download_dir, ))
                            m.assert_directory_verifies()
                            out.eend(0)
                    except GematoException as e:
                        writemsg_level(
                            "!!! Manifest verification failed:\n%s\n" % (e, ),
                            level=logging.ERROR,
                            noiselevel=-1,
                        )
                        exitcode = 1
                        verify_failure = True

            if exitcode == 0 and not local_state_unchanged:
                self.repo_storage.commit_update()
                self.repo_storage.garbage_collection()

            return (exitcode, updatecache_flg)
        finally:
            # Don't delete the update if verification failed, in case
            # the cause needs to be investigated.
            if not verify_failure:
                self.repo_storage.abort_update()
            if openpgp_env is not None:
                openpgp_env.close()
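The sync-uri parsing above is compact enough to demo on its own; the URI below is fabricated. re.split() returns the captured groups interleaved with the surrounding text, so slots [1:5] are exactly (proto, user, host, port):

import re

syncuri = "rsync://user@[2001:db8::1]:873/gentoo-portage"
proto, user_name, hostname, port = re.split(
    r"(rsync|ssh)://([^:/]+@)?(\[[:\da-fA-F]*\]|[^:/]*)(:[0-9]+)?",
    syncuri, maxsplit=4)[1:5]
print(proto, user_name, hostname, port)
# rsync user@ [2001:db8::1] :873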
Example #36
0
def analyse(settings, logger, libraries=None, la_libraries=None,
		libraries_links=None, binaries=None, _libs_to_check=None):
	"""Main program body.  It will collect all info and determine the
	pkgs needing rebuilding.

	@param logger: logger used for logging messages, instance of logging.Logger
				   class. Can be logging (RootLogger).
	@param _libs_to_check: only the given libraries will be checked
	@rtype list: list of pkgs that need rebuilding
	"""

	searchbits = set()
	'''if _libs_to_check:
		for lib in _libs_to_check:
			if "lib64" in lib:
				searchbits.add('64')
			elif "lib32" in lib:
				searchbits.add('32')
	else:
		_libs_to_check = set()'''
	searchbits.update(['64', '32'])

	masked_dirs, masked_files, ld = parse_revdep_config(settings['REVDEP_CONFDIR'])
	masked_dirs.update([
		'/lib/modules',
		'/lib32/modules',
		'/lib64/modules',
		]
	)

	if '64' not in searchbits:
		masked_dirs.update(['/lib64', '/usr/lib64'])
	elif '32' not in searchbits:
		masked_dirs.update(['/lib32', '/usr/lib32'])

	all_masks = masked_dirs.copy()
	all_masks.update(masked_files)
	logger.debug("\tall_masks:")
	for x in sorted(all_masks):
		logger.debug('\t\t%s' % (x))

	if libraries and la_libraries and libraries_links and binaries:
		logger.info(blue(' * ') +
			bold('Found a valid cache, skipping collecting phase'))
	else:
		#TODO: add partial cache (for ex. only libraries)
		# when found for some reason

		stime = current_milli_time()
		logger.warning(green(' * ') +
			bold('Collecting system binaries and libraries'))
		bin_dirs, lib_dirs = prepare_search_dirs(logger, settings)

		lib_dirs.update(ld)
		bin_dirs.update(ld)

		logger.debug('\tanalyse(), bin directories:')
		for x in sorted(bin_dirs):
			logger.debug('\t\t%s' % (x))
		logger.debug('\tanalyse(), lib directories:')
		for x in sorted(lib_dirs):
			logger.debug('\t\t%s' % (x))
		logger.debug('\tanalyse(), masked directories:')
		for x in sorted(masked_dirs):
			logger.debug('\t\t%s' % (x))
		logger.debug('\tanalyse(), masked files:')
		for x in sorted(masked_files):
			logger.debug('\t\t%s' % (x))

		ftime = current_milli_time()
		logger.debug('\ttime to complete task: %d milliseconds' % (ftime-stime))
		stime = current_milli_time()
		logger.info(green(' * ') +
			bold('Collecting dynamic linking information'))

		libraries, la_libraries, libraries_links = \
			collect_libraries_from_dir(lib_dirs, all_masks, logger)
		binaries = collect_binaries_from_dir(bin_dirs, all_masks, logger)
		ftime = current_milli_time()
		logger.debug('\ttime to complete task: %d milliseconds' % (ftime-stime))

		if settings['USE_TMP_FILES']:
			save_cache(logger=logger,
				to_save={'libraries':libraries, 'la_libraries':la_libraries,
					'libraries_links':libraries_links, 'binaries':binaries
				},
			temp_path=settings['DEFAULT_TMP_DIR']
			)


	logger.debug('\tanalyse(), Found %i libraries (+%i symlinks) and %i binaries' %
		(len(libraries), len(libraries_links), len(binaries))
	)
	logger.info(green(' * ') + bold('Scanning files'))

	libs_and_bins = libraries.union(binaries)

	scanned_files = scan_files(libs_and_bins, settings['CMD_MAX_ARGS'],
		logger, searchbits)

	logger.warning(green(' * ') + bold('Checking dynamic linking consistency'))
	logger.debug(
		'\tanalyse(), Searching for %i libs, bins within %i libraries and links'
		% (len(libs_and_bins), len(libraries)+len(libraries_links))
	)

	libcheck = LibCheck(scanned_files, logger, _libs_to_check, searchbits,
						all_masks, masked_dirs)

	broken_pathes = libcheck.process_results(libcheck.search())

	broken_la = extract_dependencies_from_la(la_libraries,
		libraries.union(libraries_links), _libs_to_check, logger)
	broken_pathes += broken_la

	if broken_pathes:
		logger.warning(green(' * ') + bold('Assign files to packages'))
		return assign_packages(broken_pathes, logger, settings)
	return None, None # no need to assign anything
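The directory-masking logic near the top of analyse() can be traced with a toy searchbits value (paths as in the code above):

searchbits = {'32'}   # pretend only 32-bit objects were requested
masked_dirs = {'/lib/modules', '/lib32/modules', '/lib64/modules'}
if '64' not in searchbits:
    masked_dirs.update(['/lib64', '/usr/lib64'])
elif '32' not in searchbits:
    masked_dirs.update(['/lib32', '/usr/lib32'])
print(sorted(masked_dirs))   # /lib64 and /usr/lib64 are now masked too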
Example #37
0
	def update(self):
		'''Internal update function which performs the transfer'''
		opts = self.options.get('emerge_config').opts
		self.usersync_uid = self.options.get('usersync_uid', None)
		enter_invalid = '--ask-enter-invalid' in opts
		quiet = '--quiet' in opts
		out = portage.output.EOutput(quiet=quiet)
		syncuri = self.repo.sync_uri
		if self.repo.module_specific_options.get(
			'sync-rsync-vcs-ignore', 'false').lower() == 'true':
			vcs_dirs = ()
		else:
			vcs_dirs = frozenset(VCS_DIRS)
			vcs_dirs = vcs_dirs.intersection(os.listdir(self.repo.location))

		for vcs_dir in vcs_dirs:
			writemsg_level(("!!! %s appears to be under revision " + \
				"control (contains %s).\n!!! Aborting rsync sync "
				"(override with \"sync-rsync-vcs-ignore = true\" in repos.conf).\n") % \
				(self.repo.location, vcs_dir), level=logging.ERROR, noiselevel=-1)
			return (1, False)
		self.timeout=180

		rsync_opts = []
		if self.settings["PORTAGE_RSYNC_OPTS"] == "":
			rsync_opts = self._set_rsync_defaults()
		else:
			rsync_opts = self._validate_rsync_opts(rsync_opts, syncuri)
		self.rsync_opts = self._rsync_opts_extend(opts, rsync_opts)

		self.extra_rsync_opts = list()
		if self.repo.module_specific_options.get('sync-rsync-extra-opts'):
			self.extra_rsync_opts.extend(portage.util.shlex_split(
				self.repo.module_specific_options['sync-rsync-extra-opts']))

		exitcode = 0
		verify_failure = False

		# Process GLEP74 verification options.
		# Default verification to 'no'; it's enabled for ::gentoo
		# via default repos.conf though.
		self.verify_metamanifest = (
				self.repo.module_specific_options.get(
					'sync-rsync-verify-metamanifest', 'no') in ('yes', 'true'))
		# Support overriding job count.
		self.verify_jobs = self.repo.module_specific_options.get(
				'sync-rsync-verify-jobs', None)
		if self.verify_jobs is not None:
			try:
				self.verify_jobs = int(self.verify_jobs)
				if self.verify_jobs < 0:
					raise ValueError(self.verify_jobs)
			except ValueError:
				writemsg_level("!!! sync-rsync-verify-jobs not a positive integer: %s\n" % (self.verify_jobs,),
					level=logging.WARNING, noiselevel=-1)
				self.verify_jobs = None
			else:
				if self.verify_jobs == 0:
					# Use the apparent number of processors if gemato
					# supports it.
					self.verify_jobs = None
		# Support overriding max age.
		self.max_age = self.repo.module_specific_options.get(
				'sync-rsync-verify-max-age', '')
		if self.max_age:
			try:
				self.max_age = int(self.max_age)
				if self.max_age < 0:
					raise ValueError(self.max_age)
			except ValueError:
				writemsg_level("!!! sync-rsync-max-age must be a non-negative integer: %s\n" % (self.max_age,),
					level=logging.WARNING, noiselevel=-1)
				self.max_age = 0
		else:
			self.max_age = 0

		openpgp_env = None
		if self.verify_metamanifest and gemato is not None:
			# Use isolated environment if key is specified,
			# system environment otherwise
			if self.repo.sync_openpgp_key_path is not None:
				openpgp_env = gemato.openpgp.OpenPGPEnvironment()
			else:
				openpgp_env = gemato.openpgp.OpenPGPSystemEnvironment()

		try:
			# Load and update the keyring early. If it fails, then verification
			# will not be performed and the user will have to fix it and try again,
			# so we may as well bail out before actual rsync happens.
			if openpgp_env is not None and self.repo.sync_openpgp_key_path is not None:
				try:
					out.einfo('Using keys from %s' % (self.repo.sync_openpgp_key_path,))
					with io.open(self.repo.sync_openpgp_key_path, 'rb') as f:
						openpgp_env.import_key(f)
					self._refresh_keys(openpgp_env)
				except (GematoException, asyncio.TimeoutError) as e:
					writemsg_level("!!! Manifest verification impossible due to keyring problem:\n%s\n"
							% (e,),
							level=logging.ERROR, noiselevel=-1)
					return (1, False)

			# Real local timestamp file.
			self.servertimestampfile = os.path.join(
				self.repo.location, "metadata", "timestamp.chk")

			content = portage.util.grabfile(self.servertimestampfile)
			timestamp = 0
			if content:
				try:
					timestamp = time.mktime(time.strptime(content[0],
						TIMESTAMP_FORMAT))
				except (OverflowError, ValueError):
					pass
			del content

			try:
				self.rsync_initial_timeout = \
					int(self.settings.get("PORTAGE_RSYNC_INITIAL_TIMEOUT", "15"))
			except ValueError:
				self.rsync_initial_timeout = 15

			try:
				maxretries=int(self.settings["PORTAGE_RSYNC_RETRIES"])
			except SystemExit as e:
				raise # Needed else can't exit
			except:
				maxretries = -1 #default number of retries

			if syncuri.startswith("file://"):
				self.proto = "file"
				dosyncuri = syncuri[7:]
				unchanged, is_synced, exitcode, updatecache_flg = self._do_rsync(
					dosyncuri, timestamp, opts)
				self._process_exitcode(exitcode, dosyncuri, out, 1)
				if exitcode == 0:
					if unchanged:
						self.repo_storage.abort_update()
					else:
						self.repo_storage.commit_update()
						self.repo_storage.garbage_collection()
				return (exitcode, updatecache_flg)

			retries=0
			try:
				self.proto, user_name, hostname, port = re.split(
					r"(rsync|ssh)://([^:/]+@)?(\[[:\da-fA-F]*\]|[^:/]*)(:[0-9]+)?",
					syncuri, maxsplit=4)[1:5]
			except ValueError:
				writemsg_level("!!! sync-uri is invalid: %s\n" % syncuri,
					noiselevel=-1, level=logging.ERROR)
				return (1, False)

			self.ssh_opts = self.settings.get("PORTAGE_SSH_OPTS")

			if port is None:
				port=""
			if user_name is None:
				user_name=""
			if re.match(r"^\[[:\da-fA-F]*\]$", hostname) is None:
				getaddrinfo_host = hostname
			else:
				# getaddrinfo needs the brackets stripped
				getaddrinfo_host = hostname[1:-1]
			updatecache_flg = False
			all_rsync_opts = set(self.rsync_opts)
			all_rsync_opts.update(self.extra_rsync_opts)

			family = socket.AF_UNSPEC
			if "-4" in all_rsync_opts or "--ipv4" in all_rsync_opts:
				family = socket.AF_INET
			elif socket.has_ipv6 and \
				("-6" in all_rsync_opts or "--ipv6" in all_rsync_opts):
				family = socket.AF_INET6

			addrinfos = None
			uris = []

			try:
				addrinfos = getaddrinfo_validate(
					socket.getaddrinfo(getaddrinfo_host, None,
					family, socket.SOCK_STREAM))
			except socket.error as e:
				writemsg_level(
					"!!! getaddrinfo failed for '%s': %s\n"
					% (_unicode_decode(hostname), _unicode(e)),
					noiselevel=-1, level=logging.ERROR)

			if addrinfos:

				AF_INET = socket.AF_INET
				AF_INET6 = None
				if socket.has_ipv6:
					AF_INET6 = socket.AF_INET6

				ips_v4 = []
				ips_v6 = []

				for addrinfo in addrinfos:
					if addrinfo[0] == AF_INET:
						ips_v4.append("%s" % addrinfo[4][0])
					elif AF_INET6 is not None and addrinfo[0] == AF_INET6:
						# IPv6 addresses need to be enclosed in square brackets
						ips_v6.append("[%s]" % addrinfo[4][0])

				random.shuffle(ips_v4)
				random.shuffle(ips_v6)

				# Give priority to the address family that
				# getaddrinfo() returned first.
				if AF_INET6 is not None and addrinfos and \
					addrinfos[0][0] == AF_INET6:
					ips = ips_v6 + ips_v4
				else:
					ips = ips_v4 + ips_v6

				for ip in ips:
					uris.append(syncuri.replace(
						"//" + user_name + hostname + port + "/",
						"//" + user_name + ip + port + "/", 1))

			if not uris:
				# With some configurations we need to use the plain hostname
				# rather than try to resolve the ip addresses (bug #340817).
				uris.append(syncuri)

			# reverse, for use with pop()
			uris.reverse()
			uris_orig = uris[:]

			effective_maxretries = maxretries
			if effective_maxretries < 0:
				effective_maxretries = len(uris) - 1

			local_state_unchanged = True
			while (1):
				if uris:
					dosyncuri = uris.pop()
				elif maxretries < 0 or retries > maxretries:
					writemsg("!!! Exhausted addresses for %s\n"
						% _unicode_decode(hostname), noiselevel=-1)
					return (1, False)
				else:
					uris.extend(uris_orig)
					dosyncuri = uris.pop()

				if (retries==0):
					if "--ask" in opts:
						uq = UserQuery(opts)
						if uq.query("Do you want to sync your ebuild repository " + \
							"with the mirror at\n" + blue(dosyncuri) + bold("?"),
							enter_invalid) == "No":
							print()
							print("Quitting.")
							print()
							sys.exit(128 + signal.SIGINT)
					self.logger(self.xterm_titles,
						">>> Starting rsync with " + dosyncuri)
					if "--quiet" not in opts:
						print(">>> Starting rsync with "+dosyncuri+"...")
				else:
					self.logger(self.xterm_titles,
						">>> Starting retry %d of %d with %s" % \
							(retries, effective_maxretries, dosyncuri))
					writemsg_stdout(
						"\n\n>>> Starting retry %d of %d with %s\n" % \
						(retries, effective_maxretries, dosyncuri), noiselevel=-1)

				if dosyncuri.startswith('ssh://'):
					dosyncuri = dosyncuri[6:].replace('/', ':/', 1)

				unchanged, is_synced, exitcode, updatecache_flg = self._do_rsync(
					dosyncuri, timestamp, opts)
				if not unchanged:
					local_state_unchanged = False
				if is_synced:
					break

				retries=retries+1

				if maxretries < 0 or retries <= maxretries:
					print(">>> Retrying...")
				else:
					# over retries
					# exit loop
					exitcode = EXCEEDED_MAX_RETRIES
					break

			self._process_exitcode(exitcode, dosyncuri, out, maxretries)

			if local_state_unchanged:
				# The quarantine download_dir is not intended to exist
				# in this case, so refer gemato to the normal repository
				# location.
				download_dir = self.repo.location
			else:
				download_dir = self.download_dir

			# if synced successfully, verify now
			if exitcode == 0 and self.verify_metamanifest:
				if gemato is None:
					writemsg_level("!!! Unable to verify: gemato-11.0+ is required\n",
						level=logging.ERROR, noiselevel=-1)
					exitcode = 127
				else:
					try:
						# we always verify the Manifest signature, in case
						# we had to deal with key revocation case
						m = gemato.recursiveloader.ManifestRecursiveLoader(
								os.path.join(download_dir, 'Manifest'),
								verify_openpgp=True,
								openpgp_env=openpgp_env,
								max_jobs=self.verify_jobs)
						if not m.openpgp_signed:
							raise RuntimeError('OpenPGP signature not found on Manifest')

						ts = m.find_timestamp()
						if ts is None:
							raise RuntimeError('Timestamp not found in Manifest')
						if (self.max_age != 0 and
								(datetime.datetime.utcnow() - ts.ts).days > self.max_age):
							out.quiet = False
							out.ewarn('Manifest is over %d days old, this is suspicious!' % (self.max_age,))
							out.ewarn('You may want to try using another mirror and/or reporting this one:')
							out.ewarn('  %s' % (dosyncuri,))
							out.ewarn('')
							out.quiet = quiet

						out.einfo('Manifest timestamp: %s UTC' % (ts.ts,))
						out.einfo('Valid OpenPGP signature found:')
						out.einfo('- primary key: %s' % (
							m.openpgp_signature.primary_key_fingerprint))
						out.einfo('- subkey: %s' % (
							m.openpgp_signature.fingerprint))
						out.einfo('- timestamp: %s UTC' % (
							m.openpgp_signature.timestamp))

						# if nothing has changed, skip the actual Manifest
						# verification
						if not local_state_unchanged:
							out.ebegin('Verifying %s' % (download_dir,))
							m.assert_directory_verifies()
							out.eend(0)
					except GematoException as e:
						writemsg_level("!!! Manifest verification failed:\n%s\n"
								% (e,),
								level=logging.ERROR, noiselevel=-1)
						exitcode = 1
						verify_failure = True

			if exitcode == 0 and not local_state_unchanged:
				self.repo_storage.commit_update()
				self.repo_storage.garbage_collection()

			return (exitcode, updatecache_flg)
		finally:
			# Don't delete the update if verification failed, in case
			# the cause needs to be investigated.
			if not verify_failure:
				self.repo_storage.abort_update()
			if openpgp_env is not None:
				openpgp_env.close()
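The Manifest age check above reduces to plain datetime arithmetic; a hedged sketch with fabricated values:

import datetime

max_age = 7                          # days, illustrative
ts = datetime.datetime(2022, 1, 1)   # Manifest TIMESTAMP, assumed UTC
if max_age != 0 and (datetime.datetime.utcnow() - ts).days > max_age:
    print('Manifest is over %d days old, this is suspicious!' % (max_age,))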
Example #38
0
	def _sync(self):
		'''Internal sync function which performs only the sync'''
		opts = self.options.get('emerge_config').opts
		self.usersync_uid = self.options.get('usersync_uid', None)
		enter_invalid = '--ask-enter-invalid' in opts
		out = portage.output.EOutput()
		syncuri = self.repo.sync_uri
		vcs_dirs = frozenset(VCS_DIRS)
		vcs_dirs = vcs_dirs.intersection(os.listdir(self.repo.location))

		for vcs_dir in vcs_dirs:
			writemsg_level(("!!! %s appears to be under revision " + \
				"control (contains %s).\n!!! Aborting rsync sync.\n") % \
				(self.repo.location, vcs_dir), level=logging.ERROR, noiselevel=-1)
			return (1, False)
		self.timeout=180

		rsync_opts = []
		if self.settings["PORTAGE_RSYNC_OPTS"] == "":
			rsync_opts = self._set_rsync_defaults()
		else:
			rsync_opts = self._validate_rsync_opts(rsync_opts, syncuri)
		self.rsync_opts = self._rsync_opts_extend(opts, rsync_opts)

		self.extra_rsync_opts = portage.util.shlex_split(
			self.settings.get("PORTAGE_RSYNC_EXTRA_OPTS",""))

		# Real local timestamp file.
		self.servertimestampfile = os.path.join(
			self.repo.location, "metadata", "timestamp.chk")

		content = portage.util.grabfile(self.servertimestampfile)
		timestamp = 0
		if content:
			try:
				timestamp = time.mktime(time.strptime(content[0],
					TIMESTAMP_FORMAT))
			except (OverflowError, ValueError):
				pass
		del content

		try:
			self.rsync_initial_timeout = \
				int(self.settings.get("PORTAGE_RSYNC_INITIAL_TIMEOUT", "15"))
		except ValueError:
			self.rsync_initial_timeout = 15

		try:
			maxretries=int(self.settings["PORTAGE_RSYNC_RETRIES"])
		except SystemExit as e:
			raise # Needed else can't exit
		except:
			maxretries = -1 #default number of retries

		if syncuri.startswith("file://"):
			self.proto = "file"
			dosyncuri = syncuri[7:]
			is_synced, exitcode = self._do_rsync(
				dosyncuri, timestamp, opts)
			self._process_exitcode(exitcode, dosyncuri, out, 1)
			return (exitcode, exitcode == os.EX_OK)

		retries=0
		try:
			self.proto, user_name, hostname, port = re.split(
				r"(rsync|ssh)://([^:/]+@)?(\[[:\da-fA-F]*\]|[^:/]*)(:[0-9]+)?",
				syncuri, maxsplit=4)[1:5]
		except ValueError:
			writemsg_level("!!! sync-uri is invalid: %s\n" % syncuri,
				noiselevel=-1, level=logging.ERROR)
			return (1, False)

		self.ssh_opts = self.settings.get("PORTAGE_SSH_OPTS")

		if port is None:
			port=""
		if user_name is None:
			user_name=""
		if re.match(r"^\[[:\da-fA-F]*\]$", hostname) is None:
			getaddrinfo_host = hostname
		else:
			# getaddrinfo needs the brackets stripped
			getaddrinfo_host = hostname[1:-1]
		updatecache_flg=True
		all_rsync_opts = set(self.rsync_opts)
		all_rsync_opts.update(self.extra_rsync_opts)

		family = socket.AF_UNSPEC
		if "-4" in all_rsync_opts or "--ipv4" in all_rsync_opts:
			family = socket.AF_INET
		elif socket.has_ipv6 and \
			("-6" in all_rsync_opts or "--ipv6" in all_rsync_opts):
			family = socket.AF_INET6

		addrinfos = None
		uris = []

		try:
			addrinfos = getaddrinfo_validate(
				socket.getaddrinfo(getaddrinfo_host, None,
				family, socket.SOCK_STREAM))
		except socket.error as e:
			writemsg_level(
				"!!! getaddrinfo failed for '%s': %s\n" % (hostname, e),
				noiselevel=-1, level=logging.ERROR)

		if addrinfos:

			AF_INET = socket.AF_INET
			AF_INET6 = None
			if socket.has_ipv6:
				AF_INET6 = socket.AF_INET6

			ips_v4 = []
			ips_v6 = []

			for addrinfo in addrinfos:
				if addrinfo[0] == AF_INET:
					ips_v4.append("%s" % addrinfo[4][0])
				elif AF_INET6 is not None and addrinfo[0] == AF_INET6:
					# IPv6 addresses need to be enclosed in square brackets
					ips_v6.append("[%s]" % addrinfo[4][0])

			random.shuffle(ips_v4)
			random.shuffle(ips_v6)

			# Give priority to the address family that
			# getaddrinfo() returned first.
			if AF_INET6 is not None and addrinfos and \
				addrinfos[0][0] == AF_INET6:
				ips = ips_v6 + ips_v4
			else:
				ips = ips_v4 + ips_v6

			for ip in ips:
				uris.append(syncuri.replace(
					"//" + user_name + hostname + port + "/",
					"//" + user_name + ip + port + "/", 1))

		if not uris:
			# With some configurations we need to use the plain hostname
			# rather than try to resolve the ip addresses (bug #340817).
			uris.append(syncuri)

		# reverse, for use with pop()
		uris.reverse()

		effective_maxretries = maxretries
		if effective_maxretries < 0:
			effective_maxretries = len(uris) - 1

		while (1):
			if uris:
				dosyncuri = uris.pop()
			else:
				writemsg("!!! Exhausted addresses for %s\n" % \
					hostname, noiselevel=-1)
				return (1, False)

			if (retries==0):
				if "--ask" in opts:
					uq = UserQuery(opts)
					if uq.query("Do you want to sync your Portage tree " + \
						"with the mirror at\n" + blue(dosyncuri) + bold("?"),
						enter_invalid) == "No":
						print()
						print("Quitting.")
						print()
						sys.exit(128 + signal.SIGINT)
				self.logger(self.xterm_titles,
					">>> Starting rsync with " + dosyncuri)
				if "--quiet" not in opts:
					print(">>> Starting rsync with "+dosyncuri+"...")
			else:
				self.logger(self.xterm_titles,
					">>> Starting retry %d of %d with %s" % \
						(retries, effective_maxretries, dosyncuri))
				writemsg_stdout(
					"\n\n>>> Starting retry %d of %d with %s\n" % \
					(retries, effective_maxretries, dosyncuri), noiselevel=-1)

			if dosyncuri.startswith('ssh://'):
				dosyncuri = dosyncuri[6:].replace('/', ':/', 1)

			is_synced, exitcode = self._do_rsync(dosyncuri, timestamp, opts)
			if is_synced:
				break

			retries=retries+1

			if maxretries < 0 or retries <= maxretries:
				print(">>> Retrying...")
			else:
				# over retries
				# exit loop
				updatecache_flg=False
				exitcode = EXCEEDED_MAX_RETRIES
				break
		self._process_exitcode(exitcode, dosyncuri, out, maxretries)
		return (exitcode, updatecache_flg)
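The ssh:// rewrite inside the loop above turns a URL into the host:/path form rsync expects; standalone, with a made-up URI:

dosyncuri = "ssh://example.org/gentoo-portage"
if dosyncuri.startswith('ssh://'):
    dosyncuri = dosyncuri[6:].replace('/', ':/', 1)
print(dosyncuri)  # example.org:/gentoo-portage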
Example #39
0
def main(settings=None, logger=None):
    """Main program operation method....

    @param settings: dict.  defaults to settings.DEFAULTS
    @param logger: python logging module defaults to init_logger(settings)
    @return boolean  success/failure
    """
    if settings is None:
        print("NO Input settings, using defaults...")
        settings = DEFAULTS.copy()

    if logger is None:
        logger = init_logger(settings)

    _libs_to_check = settings["library"]

    if not settings["stdout"].isatty() or settings["nocolor"]:
        nocolor()

    logger.warning(
        blue(" * ") + yellow("This is the new python coded version"))
    logger.warning(
        blue(" * ") + yellow("Please report any bugs found using it."))
    logger.warning(
        blue(" * ") + yellow("The original revdep-rebuild script is "
                             "installed as revdep-rebuild.sh"))
    logger.warning(
        blue(" * ") + yellow("Please file bugs at: "
                             "https://bugs.gentoo.org/"))

    if os.getuid() != 0 and not settings["PRETEND"]:
        logger.warning(
            blue(" * ") +
            yellow("You are not root, adding --pretend to portage options"))
        settings["PRETEND"] = True

    logger.debug("\tmain(), _libs_to_check = %s" % str(_libs_to_check))

    if settings["USE_TMP_FILES"] and check_temp_files(
            settings["DEFAULT_TMP_DIR"], logger=logger):
        libraries, la_libraries, libraries_links, binaries = read_cache(
            settings["DEFAULT_TMP_DIR"])
        assigned, orphaned = analyse(
            settings=settings,
            logger=logger,
            libraries=libraries,
            la_libraries=la_libraries,
            libraries_links=libraries_links,
            binaries=binaries,
            _libs_to_check=_libs_to_check,
        )
    else:
        assigned, orphaned = analyse(settings,
                                     logger,
                                     _libs_to_check=_libs_to_check)

    if not assigned and not orphaned:
        logger.warning("\n" + bold("Your system is consistent"))
        # return the correct exit code
        return 0
    elif orphaned:
        # blank line for better visibility of the following lines
        logger.warning("")
        if settings["library"]:
            logger.warning(
                red(" !!! Dependant orphaned files: ") +
                bold("No installed package was found for the following:"))
        else:
            logger.warning(
                red(" !!! Broken orphaned files: ") +
                bold("No installed package was found for the following:"))
        for filename in orphaned:
            logger.warning(red("\t* ") + filename)

    success = rebuild(logger, assigned, settings)
    logger.debug("rebuild return code = %i" % success)
    return success
Example #40
0
def _create_use_string(
    conf, name, cur_iuse, iuse_forced, cur_use, old_iuse, old_use, is_new, feature_flags, reinst_flags
):

    if not conf.print_use_string:
        return ""

    enabled = []
    if conf.alphabetical:
        disabled = enabled
        removed = enabled
    else:
        disabled = []
        removed = []
    cur_iuse = set(cur_iuse)
    enabled_flags = cur_iuse.intersection(cur_use)
    removed_iuse = set(old_iuse).difference(cur_iuse)
    any_iuse = cur_iuse.union(old_iuse)
    any_iuse = list(any_iuse)
    any_iuse.sort()

    for flag in any_iuse:
        flag_str = None
        isEnabled = False
        reinst_flag = reinst_flags and flag in reinst_flags
        if flag in enabled_flags:
            isEnabled = True
            if is_new or flag in old_use and (conf.all_flags or reinst_flag):
                flag_str = red(flag)
            elif flag not in old_iuse:
                flag_str = yellow(flag) + "%*"
            elif flag not in old_use:
                flag_str = green(flag) + "*"
        elif flag in removed_iuse:
            if conf.all_flags or reinst_flag:
                flag_str = yellow("-" + flag) + "%"
                if flag in old_use:
                    flag_str += "*"
                flag_str = "(" + flag_str + ")"
                removed.append(flag_str)
            continue
        else:
            if is_new or flag in old_iuse and flag not in old_use and (conf.all_flags or reinst_flag):
                flag_str = blue("-" + flag)
            elif flag not in old_iuse:
                flag_str = yellow("-" + flag)
                if flag not in iuse_forced:
                    flag_str += "%"
            elif flag in old_use:
                flag_str = green("-" + flag) + "*"
        if flag_str:
            if flag in feature_flags:
                flag_str = "{" + flag_str + "}"
            elif flag in iuse_forced:
                flag_str = "(" + flag_str + ")"
            if isEnabled:
                enabled.append(flag_str)
            else:
                disabled.append(flag_str)

    if conf.alphabetical:
        ret = " ".join(enabled)
    else:
        ret = " ".join(enabled + disabled + removed)
    if ret:
        ret = '%s="%s" ' % (name, ret)
    return ret
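The decorations built above hinge on a little set arithmetic; here it is in isolation, with invented flags:

cur_iuse = {"ssl", "ipv6", "gtk"}     # flags the new ebuild offers
cur_use = {"ssl", "gtk"}              # flags that will be enabled
old_iuse = {"ssl", "ipv6", "kde"}     # flags the installed version offered

enabled_flags = cur_iuse.intersection(cur_use)      # {'gtk', 'ssl'}
removed_iuse = set(old_iuse).difference(cur_iuse)   # {'kde'}
any_iuse = sorted(cur_iuse.union(old_iuse))         # everything to render
print(enabled_flags, removed_iuse, any_iuse)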
Example #41
0
def collect_libraries_from_dir(dirs, mask, logger):
    '''Collects all libraries from the specified list of directories.
    mask is a list of paths omitted from scanning; each entry can be
    either a single file or an entire directory.
    Returns a tuple composed of: set of libraries, set of .la files,
    and set of symlinks.
    '''

    # contains list of directories found
    # allows us to reduce number of fnc calls
    found_directories = set()
    found_files = set()
    found_symlinks = set()
    found_la_files = set()  # la libraries

    for _dir in dirs:
        if _dir in mask:
            continue

        try:
            for _listing in os.listdir(_dir):
                listing = os.path.join(_dir, _listing)
                if listing in mask or _listing in mask:
                    continue

                if os.path.isdir(listing):
                    if os.path.islink(listing):
                        # we do not want to scan symlink directories
                        pass
                    else:
                        found_directories.add(listing)
                elif os.path.isfile(listing):
                    if (listing.endswith('.so') or listing.endswith('.a')
                            or '.so.' in listing):

                        if os.path.islink(listing):
                            found_symlinks.add(listing)
                        else:
                            found_files.add(listing)
                        continue
                    elif listing.endswith('.la'):
                        if listing in found_la_files:
                            continue

                        found_la_files.add(listing)
                    else:
                        # sometimes there are binaries in libs' subdir,
                        # for example in nagios
                        if not os.path.islink(listing):
                            #if listing in found_files or listing in found_symlinks:
                            #continue
                            prv = os.stat(listing)[stat.ST_MODE]
                            if prv & stat.S_IXUSR == stat.S_IXUSR or \
                              prv & stat.S_IXGRP == stat.S_IXGRP or \
                              prv & stat.S_IXOTH == stat.S_IXOTH:
                                found_files.add(listing)
        except Exception as ex:
            logger.debug('\t' + yellow('Exception collecting libraries: ' +
                                       blue('%s') % str(ex)))

    if found_directories:
        _file, la_file, link = \
         collect_libraries_from_dir(found_directories, mask, logger)
        found_files.update(_file)
        found_la_files.update(la_file)
        found_symlinks.update(link)
    return (found_files, found_la_files, found_symlinks)
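The executable-bit test buried in the directory walker above is a plain mode check; a standalone sketch on an arbitrary existing path:

import os
import stat

path = "/bin/sh"  # illustrative
prv = os.stat(path)[stat.ST_MODE]
is_exec = bool(prv & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH))
print(path, "executable:", is_exec)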
Example #42
0
    def swig_generate(self):
        "build all available modules"

        quote = lambda s: '"' + s + '"'
        for module in self.MODULES:
            module_name = self.MODULES[module][0]
            config_cmd = self.MODULES[module][1]
            module_pkg_name = self.MODULES[module][2]
            mod_hack_name = self.MODULES[module][3]
            mod_out_prefix = module_pkg_name.replace('.', os.sep) + module

            if sys.platform == "win32":
                INCLUDE_DIR = os.path.join(os.getenv("COINDIR"), "include")
                CPP_FLAGS = "-I" + quote(INCLUDE_DIR) + " " + \
                            "-I" + quote(os.path.join(os.getenv("COINDIR"), "include", "Inventor", "annex")) + \
                            " /DCOIN_DLL /wd4244 /wd4049"
                # acquire the highest non-debug Coin library version
                try:
                    LDFLAGS_LIBS = quote(
                        max(
                            glob.glob(
                                os.path.join(os.getenv("COINDIR"), "lib",
                                             "coin?.lib")))) + " "
                # with CMake the Coin library is named Coin4.lib
                except ValueError:
                    LDFLAGS_LIBS = quote(
                        max(
                            glob.glob(
                                os.path.join(os.getenv("COINDIR"), "lib",
                                             "Coin?.lib")))) + " "

                if module == "sowin":
                    CPP_FLAGS += " /DSOWIN_DLL"
                    LDFLAGS_LIBS += quote(
                        os.path.join(os.getenv("COINDIR"), "lib",
                                     "sowin1.lib"))
                elif module == "soqt":
                    CPP_FLAGS += " -I" + '"' + os.getenv(
                        "QTDIR") + "\\include\"  /DSOQT_DLL"
                    if os.path.isdir(os.getenv("QTDIR") + "\\include\Qt\""):
                        CPP_FLAGS += " -I" + '"' + os.getenv(
                            "QTDIR") + "\\include\Qt\""
                        LDFLAGS_LIBS += os.path.join(os.getenv("COINDIR"),
                                                     "lib", "soqt1.lib") + " "
                    else:
                        # workaround for conda qt4:
                        CPP_FLAGS += " -I" + '"' + os.getenv(
                            "QTDIR") + "\\include\qt\Qt\""
                        CPP_FLAGS += " -I" + '"' + os.getenv(
                            "QTDIR") + "\\include\qt\""
                        LDFLAGS_LIBS += os.path.join(os.getenv("COINDIR"),
                                                     "lib", "SoQt.lib") + " "
            else:
                INCLUDE_DIR = self.do_os_popen("coin-config --includedir")
                if module_name != 'coin':
                    mod_include_dir = self.do_os_popen("%s --includedir" %
                                                       config_cmd)
                    if mod_include_dir != INCLUDE_DIR:
                        INCLUDE_DIR += '\" -I\"%s' % mod_include_dir
                CPP_FLAGS = self.do_os_popen(
                    "%s --cppflags" %
                    config_cmd) + " -Wno-unused -Wno-maybe-uninitialized"
                LDFLAGS_LIBS = self.do_os_popen("%s --ldflags --libs" %
                                                config_cmd)

            if not os.path.isfile(mod_out_prefix + "_wrap.cpp"):
                print(
                    red("\n=== Generating %s_wrap.cpp for %s ===\n" %
                        (mod_out_prefix, module)))
                print(
                    blue(self.SWIG + " " + self.SWIG_SUPPRESS_WARNINGS + " " +
                         self.SWIG_PARAMS %
                         (INCLUDE_DIR, self.CXX_INCS, mod_out_prefix,
                          mod_hack_name)))
                if os.system(self.SWIG + " " + self.SWIG_SUPPRESS_WARNINGS +
                             " " + self.SWIG_PARAMS %
                             (INCLUDE_DIR, self.CXX_INCS, mod_out_prefix,
                              mod_hack_name)):
                    print(
                        red("SWIG did not generate wrappers successfully! ** Aborting **"
                            ))
                    sys.exit(1)
            else:
                print(
                    red("=== %s_wrap.cpp for %s already exists! ===" %
                        (mod_out_prefix, module_pkg_name + module)))

            self.ext_modules.append(
                Extension(
                    module_name, [mod_out_prefix + "_wrap.cpp"],
                    extra_compile_args=(self.CXX_INCS + CPP_FLAGS).split(),
                    extra_link_args=(self.CXX_LIBS + LDFLAGS_LIBS).split()))
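
On non-Windows platforms the per-module plumbing above reduces to the pattern below: query a *-config script for flags and hand them to an Extension. do_popen is a stand-in I am assuming for the class's do_os_popen helper, and the module name and wrapper path are illustrative:

import subprocess
from distutils.core import Extension

def do_popen(cmd):
    # stand-in for do_os_popen(): run a config script, return stripped output
    return subprocess.check_output(cmd, shell=True).decode().strip()

CPP_FLAGS = do_popen("coin-config --cppflags")
LDFLAGS_LIBS = do_popen("coin-config --ldflags --libs")
ext = Extension("pivy.coin", ["pivy/coin_wrap.cpp"],
                extra_compile_args=CPP_FLAGS.split(),
                extra_link_args=LDFLAGS_LIBS.split())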
Example #43
0
def analyse(settings,
            logger,
            libraries=None,
            la_libraries=None,
            libraries_links=None,
            binaries=None,
            _libs_to_check=None):
    """Main program body.  It will collect all info and determine the
	pkgs needing rebuilding.

	@param logger: logger used for logging messages, instance of logging.Logger
				   class. Can be logging (RootLogger).
	@param _libs_to_check Libraries that need to be checked only
	@rtype list: list of pkgs that need rebuilding
	"""

    searchbits = set()
    # disabled logic that derived searchbits from _libs_to_check:
    # if _libs_to_check:
    #     for lib in _libs_to_check:
    #         if "lib64" in lib:
    #             searchbits.add('64')
    #         elif "lib32" in lib:
    #             searchbits.add('32')
    # else:
    #     _libs_to_check = set()
    searchbits.update(['64', '32'])

    masked_dirs, masked_files, ld = parse_revdep_config(
        settings['REVDEP_CONFDIR'])
    masked_dirs.update([
        '/lib/modules',
        '/lib32/modules',
        '/lib64/modules',
    ])

    if '64' not in searchbits:
        masked_dirs.update(['/lib64', '/usr/lib64'])
    elif '32' not in searchbits:
        masked_dirs.update(['/lib32', '/usr/lib32'])

    all_masks = masked_dirs.copy()
    all_masks.update(masked_files)
    logger.debug("\tall_masks:")
    for x in sorted(all_masks):
        logger.debug('\t\t%s' % (x))

    if libraries and la_libraries and libraries_links and binaries:
        logger.info(
            blue(' * ') +
            bold('Found a valid cache, skipping collecting phase'))
    else:
        # TODO: add partial cache support (e.g. only libraries)
        # when one is found for some reason

        stime = current_milli_time()
        logger.warning(
            green(' * ') + bold('Collecting system binaries and libraries'))
        bin_dirs, lib_dirs = prepare_search_dirs(logger, settings)

        lib_dirs.update(ld)
        bin_dirs.update(ld)

        logger.debug('\tanalyse(), bin directories:')
        for x in sorted(bin_dirs):
            logger.debug('\t\t%s' % (x))
        logger.debug('\tanalyse(), lib directories:')
        for x in sorted(lib_dirs):
            logger.debug('\t\t%s' % (x))
        logger.debug('\tanalyse(), masked directories:')
        for x in sorted(masked_dirs):
            logger.debug('\t\t%s' % (x))
        logger.debug('\tanalyse(), masked files:')
        for x in sorted(masked_files):
            logger.debug('\t\t%s' % (x))

        ftime = current_milli_time()
        logger.debug('\ttime to complete task: %d milliseconds' %
                     (ftime - stime))
        stime = current_milli_time()
        logger.info(
            green(' * ') + bold('Collecting dynamic linking information'))

        libraries, la_libraries, libraries_links = \
         collect_libraries_from_dir(lib_dirs, all_masks, logger)
        binaries = collect_binaries_from_dir(bin_dirs, all_masks, logger)
        ftime = current_milli_time()
        logger.debug('\ttime to complete task: %d milliseconds' %
                     (ftime - stime))

        if settings['USE_TMP_FILES']:
            save_cache(logger=logger,
                       to_save={
                           'libraries': libraries,
                           'la_libraries': la_libraries,
                           'libraries_links': libraries_links,
                           'binaries': binaries
                       },
                       temp_path=settings['DEFAULT_TMP_DIR'])

    logger.debug(
        '\tanalyse(), Found %i libraries (+%i symlinks) and %i binaries' %
        (len(libraries), len(libraries_links), len(binaries)))
    logger.info(green(' * ') + bold('Scanning files'))

    libs_and_bins = libraries.union(binaries)

    scanned_files = scan_files(libs_and_bins, settings['CMD_MAX_ARGS'], logger,
                               searchbits)

    logger.warning(green(' * ') + bold('Checking dynamic linking consistency'))
    logger.debug(
        '\tanalyse(), Searching for %i libs, bins within %i libraries and links'
        % (len(libs_and_bins), len(libraries) + len(libraries_links)))

    libcheck = LibCheck(scanned_files, logger, _libs_to_check, searchbits,
                        all_masks, masked_dirs)

    broken_pathes = libcheck.process_results(libcheck.search())

    broken_la = extract_dependencies_from_la(la_libraries,
                                             libraries.union(libraries_links),
                                             _libs_to_check, logger)
    broken_pathes += broken_la

    if broken_pathes:
        logger.warning(green(' * ') + bold('Assigning files to packages'))
        return assign_packages(broken_pathes, logger, settings)
    return None, None  # no need to assign anything
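
A hedged sketch of invoking analyse() for a fresh, cache-less run; only the settings keys the function body actually reads are shown, and their values are illustrative, not taken from the snippet:

import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger("revdep-rebuild")

settings = {
    'REVDEP_CONFDIR': '/etc/revdep-rebuild/',
    'USE_TMP_FILES': False,
    'DEFAULT_TMP_DIR': '/var/cache/revdep-rebuild',
    'CMD_MAX_ARGS': 1000,
}

# returns the assign_packages() result for broken files,
# or (None, None) when nothing is broken
result = analyse(settings, log)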