Example 1
    def unpack_dir(self, target_dir):
        self.file_path = util.remove_suffix(".Z", self.file_path)

        ret, out, err = util.run_batch(
                "uncompress -cf %s.Z > %s" % (self.file_path, self.file_path))
        if ret != 0:
            raise RuntimeError(
                        _("Problem occured while uncompressing %s.Z file")
                        % self.file_path)

        self.tar = tarfile.open(self.file_path)

        oldwd = None
        try:
            # Don't fail if CWD doesn't exist (#6748)
            oldwd = os.getcwd()
        except OSError:
            pass
        os.chdir(target_dir)

        uid = os.getuid()
        gid = os.getgid()

        for tarinfo in self.tar:
            self.tar.extract(tarinfo)

            # tarfile.extract does not honor umask. It must be honored
            # explicitly. See the --no-same-permissions option of tar(1),
            # which is the default behaviour.
            #
            # Note: this is not desirable while installing a pisi package.
            # That's why it is optional.
            if self.no_same_permissions and not os.path.islink(tarinfo.name):
                os.chmod(tarinfo.name, tarinfo.mode & ~ctx.const.umask)

            if self.no_same_owner:
                if not os.path.islink(tarinfo.name):
                    os.chown(tarinfo.name, uid, gid)
                else:
                    os.lchown(tarinfo.name, uid, gid)

        # Bug #10680 and addition for tarZ files
        os.unlink(self.file_path)

        try:
            if oldwd:
                os.chdir(oldwd)
        # Bug #6748
        except OSError:
            pass
        self.tar.close()
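
The loop in this example restores umask behaviour by hand because tarfile.extract applies the mode stored in the archive verbatim. Below is a minimal standalone sketch of that masking step, assuming a plain tarball and none of the pisi-specific helpers (ctx, util); extract_honoring_umask is a hypothetical name:

    import os
    import tarfile

    def extract_honoring_umask(archive_path, target_dir):
        # Read the current process umask without changing it permanently.
        umask = os.umask(0)
        os.umask(umask)

        with tarfile.open(archive_path) as tar:
            for tarinfo in tar:
                tar.extract(tarinfo, path=target_dir)
                extracted = os.path.join(target_dir, tarinfo.name)
                # Clear the permission bits masked out by the umask, mirroring
                # tar's --no-same-permissions behaviour; symlinks are skipped.
                if not os.path.islink(extracted):
                    os.chmod(extracted, tarinfo.mode & ~umask)
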
Example 2
    def index(self, repo_uri, skip_sources=False):
        self.repo_dir = repo_uri

        packages = []
        specs = []
        deltas = {}

        # Sort loose packages at the top level of the repository into their
        # per-package subdirectories (os.walk().next() yields only the files
        # directly under repo_uri).
        for fn in os.walk(repo_uri).next()[2]:
            if fn.endswith(ctx.const.delta_package_suffix) or fn.endswith(ctx.const.package_suffix):
                pkgpath = os.path.join(repo_uri,
                                       util.parse_package_dir_path(fn))
                if not os.path.isdir(pkgpath):
                    os.makedirs(pkgpath)
                ctx.ui.info("%-80.80s\r" % (_('Sorting: %s ') % fn),
                            noln=(not ctx.config.get_option("verbose")))
                shutil.copy2(os.path.join(repo_uri, fn), pkgpath)
                os.remove(os.path.join(repo_uri, fn))

        for root, dirs, files in os.walk(repo_uri):
            # Filter out hidden directories
            # TODO: Add an --exclude-dirs parameter to the CLI and filter
            # directories accordingly here
            dirs[:] = [d for d in dirs if not d.startswith(".")]

            for fn in files:

                if fn.endswith(ctx.const.delta_package_suffix):
                    name, version = util.parse_package_name(fn)
                    deltas.setdefault(name, []).append(os.path.join(root, fn))
                elif fn.endswith(ctx.const.package_suffix):
                    packages.append(os.path.join(root, fn))

                if fn == 'components.xml':
                    self.components.extend(add_components(os.path.join(root, fn)))
                if fn == 'pspec.xml' and not skip_sources:
                    specs.append((os.path.join(root, fn), repo_uri))
                if fn == 'distribution.xml':
                    self.distribution = add_distro(os.path.join(root, fn))
                if fn == 'groups.xml':
                    self.groups.extend(add_groups(os.path.join(root, fn)))

        ctx.ui.info("")

        # Create a process pool with as many worker processes as there are
        # CPUs
        pool = multiprocessing.Pool()

        # Before calling pool.map, check that the list is not empty: python#12157
        if specs:
            try:
                # Add source packages to index using a process pool
                self.specs = pool.map(add_spec, specs)
            except:
                # If an exception occurs (like a keyboard interrupt),
                # immediately terminate worker processes and propagate the
                # exception. (The CLI honors KeyboardInterrupt; if you're not
                # using the CLI, you must handle KeyboardInterrupt yourself.)
                pool.terminate()
                pool.join()
                ctx.ui.info("")
                raise

        try:
            obsoletes_list = list(map(str, self.distribution.obsoletes))
        except AttributeError:
            obsoletes_list = []

        latest_packages = []

        for pkg in util.filter_latest_packages(packages):
            pkg_name = util.parse_package_name(os.path.basename(pkg))[0]
            if pkg_name.endswith(ctx.const.debug_name_suffix):
                pkg_name = util.remove_suffix(ctx.const.debug_name_suffix,
                                              pkg_name)
            if pkg_name not in obsoletes_list:
                # Currently, multiprocessing.Pool.map accepts functions taking
                # a single parameter only, so we pack our parameters into a
                # tuple to work around that.

                latest_packages.append((pkg, deltas, repo_uri))

        # Before calling pool.map, check that the list is not empty: python#12157
        if latest_packages:
            try:
                # Add binary packages to index using a process pool
                self.packages = pool.map(add_package, latest_packages)
            except:
                pool.terminate()
                pool.join()
                ctx.ui.info("")
                raise

        ctx.ui.info("")
        pool.close()
        pool.join()
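
Because multiprocessing.Pool.map passes exactly one argument to each worker call, the index above packs (pkg, deltas, repo_uri) into a tuple before mapping. Below is a minimal standalone sketch of that workaround, with a hypothetical worker standing in for add_package:

    import multiprocessing

    def add_package(args):
        # pool.map delivers a single argument, so the real parameters arrive
        # packed in a tuple and are unpacked inside the worker.
        path, deltas, repo_uri = args
        return path  # stand-in for building the real index entry

    if __name__ == "__main__":
        work = [("a.pisi", {}, "/repo"), ("b.pisi", {}, "/repo")]
        pool = multiprocessing.Pool()
        try:
            results = pool.map(add_package, work)
        finally:
            pool.close()
            pool.join()
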