Example 1
    def __cacher(self, run_until_empty=False, sync=False, _loop=False):
        """
        This is where the actual asynchronous copy takes
        place. __cacher runs on a separate thread and all
        the operations it performs are atomic and
        thread-safe. It keeps looping until __alive
        becomes False.
        """
        try:
            if self.__inside_with_stmt != 0:
                return
        except AttributeError:
            # interpreter shutdown
            pass

        # make sure our set delay is respected
        try:
            self.__cache_writer.set_delay(EntropyCacher.WRITEBACK_TIMEOUT)
        except AttributeError:
            # can be None
            pass

        # sleep if there's nothing to do
        if _loop:
            try:
                # CANBLOCK
                self.__worker_sem.acquire()
                # we just consumed a token that signalled
                # pending data; put it back so the drain
                # loop below can account for it
                self.__worker_sem.release()
            except AttributeError:
                pass

        def _commit_data(_massive_data):
            for (key, cache_dir), data in _massive_data:
                d_o = entropy.dump.dumpobj
                if d_o is not None:
                    d_o(key, data, dump_dir=cache_dir)

        while self.__alive or run_until_empty:

            if const_debug_enabled():
                const_debug_write(
                    __name__,
                    "EntropyCacher.__cacher: loop: %s, alive: %s, empty: %s" %
                    (
                        _loop,
                        self.__alive,
                        run_until_empty,
                    ))

            with self.__enter_context_lock:
                massive_data = []
                try:
                    massive_data_count = EntropyCacher._OBJS_WRITTEN_AT_ONCE
                except AttributeError:  # interpreter shutdown
                    break
                while massive_data_count > 0:

                    if _loop:
                        # an item was queued through worker_sem:
                        # consume one token with a non-blocking acquire();
                        # we can't sleep here because we're in a critical
                        # region holding __enter_context_lock
                        self.__worker_sem.acquire(False)

                    massive_data_count -= 1
                    try:
                        data = self.__cache_buffer.pop()
                    except (
                            ValueError,
                            TypeError,
                    ):
                        # TypeError is when objects are being destroyed
                        break  # stack empty
                    massive_data.append(data)

                if not massive_data:
                    break

                task = ParallelTask(_commit_data, massive_data)
                task.name = "EntropyCacherCommitter"
                task.daemon = not sync
                task.start()
                if sync:
                    task.join()

                if const_debug_enabled():
                    const_debug_write(
                        __name__,
                        "EntropyCacher.__cacher [%s], writing %s objs" % (
                            task,
                            len(massive_data),
                        ))

                if EntropyCacher.STASHING_CACHE:
                    for (key, cache_dir), data in massive_data:
                        try:
                            del self.__stashing_cache[(key, cache_dir)]
                        except (
                                AttributeError,
                                KeyError,
                        ):
                            continue
                del massive_data[:]
                del massive_data
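
The wakeup protocol around __worker_sem is the subtle part: the producer release()s the semaphore once per queued item, the idle worker blocks on acquire(), immediately returns the token, and the drain loop then re-takes one token per popped item without blocking. Below is a minimal sketch of that pattern, using hypothetical names (push, drain_once) and plain threading primitives rather than the EntropyCacher internals:

import threading

_buffer = []
_buffer_lock = threading.Lock()
_worker_sem = threading.Semaphore(0)

def push(item):
    # producer side: queue the item, then signal the worker
    with _buffer_lock:
        _buffer.append(item)
    _worker_sem.release()

def drain_once():
    # worker side: sleep until at least one item is available
    _worker_sem.acquire()      # CANBLOCK, like the code above
    _worker_sem.release()      # give the consumed token back
    drained = []
    with _buffer_lock:
        while _buffer:
            # one token per item; never block inside the critical region
            _worker_sem.acquire(False)
            drained.append(_buffer.pop())
    return drained

push("hello")
print(drain_once())    # ['hello']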
Example 2
    def __cacher(self, run_until_empty=False, sync=False, _loop=False):
        """
        This is where the actual asynchronous copy takes
        place. __cacher runs on a separate thread and all
        the operations it performs are atomic and
        thread-safe. It keeps looping until __alive
        becomes False.
        """
        try:
            if self.__inside_with_stmt != 0:
                return
        except AttributeError:
            # interpreter shutdown
            pass

        # make sure our set delay is respected
        try:
            self.__cache_writer.set_delay(EntropyCacher.WRITEBACK_TIMEOUT)
        except AttributeError:
            # can be None
            pass

        # sleep if there's nothing to do
        if _loop:
            try:
                # CANBLOCK
                self.__worker_sem.acquire()
                # we just consumed a token that signalled
                # pending data; put it back so the drain
                # loop below can account for it
                self.__worker_sem.release()
            except AttributeError:
                pass

        def _commit_data(_massive_data):
            for (key, cache_dir), data in _massive_data:
                d_o = entropy.dump.dumpobj
                if d_o is not None:
                    d_o(key, data, dump_dir=cache_dir)

        while self.__alive or run_until_empty:

            if const_debug_enabled():
                const_debug_write(
                    __name__,
                    "EntropyCacher.__cacher: loop: %s, alive: %s, empty: %s" % (_loop, self.__alive, run_until_empty),
                )

            with self.__enter_context_lock:
                massive_data = []
                try:
                    massive_data_count = EntropyCacher._OBJS_WRITTEN_AT_ONCE
                except AttributeError:  # interpreter shutdown
                    break
                while massive_data_count > 0:

                    if _loop:
                        # an item was queued through worker_sem:
                        # consume one token with a non-blocking acquire();
                        # we can't sleep here because we're in a critical
                        # region holding __enter_context_lock
                        self.__worker_sem.acquire(False)

                    massive_data_count -= 1
                    try:
                        data = self.__cache_buffer.pop()
                    except (ValueError, TypeError):
                        # TypeError is when objects are being destroyed
                        break  # stack empty
                    massive_data.append(data)

                if not massive_data:
                    break

                task = ParallelTask(_commit_data, massive_data)
                task.name = "EntropyCacherCommitter"
                task.daemon = not sync
                task.start()
                if sync:
                    task.join()

                if const_debug_enabled():
                    const_debug_write(
                        __name__, "EntropyCacher.__cacher [%s], writing %s objs" % (task, len(massive_data))
                    )

                if EntropyCacher.STASHING_CACHE:
                    for (key, cache_dir), data in massive_data:
                        try:
                            del self.__stashing_cache[(key, cache_dir)]
                        except (AttributeError, KeyError):
                            continue
                del massive_data[:]
                del massive_data
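
Both variants commit each batch on a separate writer thread and only join() it in synchronous mode. A minimal sketch of that batching scheme follows, with plain threading.Thread standing in for ParallelTask and a print standing in for entropy.dump.dumpobj (both are assumptions of this sketch):

import threading

def _commit(batch):
    # stand-in for entropy.dump.dumpobj: persist each (key, data) pair
    for key, data in batch:
        print("writing", key)

def flush(buffer, batch_size=10, sync=False):
    # drain the buffer in fixed-size batches, one writer thread per
    # batch, mirroring the _OBJS_WRITTEN_AT_ONCE loop above
    while buffer:
        batch = buffer[:batch_size]
        del buffer[:batch_size]
        task = threading.Thread(target=_commit, args=(batch,))
        task.name = "CommitterSketch"
        # asynchronous writers are daemons so they cannot block
        # interpreter shutdown; synchronous callers join() instead
        task.daemon = not sync
        task.start()
        if sync:
            task.join()

flush([("key-%d" % i, {"i": i}) for i in range(25)], sync=True)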
Example 3
    def _install_action(self, entropy_client, deps, recursive,
                        pretend, ask, verbose, quiet, empty,
                        config_files, deep, fetch, bdeps,
                        onlydeps, relaxed, multifetch, packages,
                        package_matches=None):
        """
        Solo Install action implementation.

        Packages passed via the packages argument (as opposed to
        package_matches) will be marked as installed by the user.
        """
        inst_repo = entropy_client.installed_repository()
        action_factory = entropy_client.PackageActionFactory()
        packages_by_user = set()

        with inst_repo.shared():

            self._advise_repository_update(entropy_client)
            if self._check_critical_updates:
                self._advise_packages_update(entropy_client)

            if package_matches is None:
                packages = self._match_packages_for_installation(
                    entropy_client, onlydeps, packages)
                if not packages:
                    return 1, False
                packages_by_user = set(packages)
            else:
                packages = package_matches

            run_queue, removal_queue = self._generate_install_queue(
                entropy_client, packages, deps, empty, deep, relaxed,
                onlydeps, bdeps, recursive)
            if (run_queue is None) or (removal_queue is None):
                return 1, False
            elif not (run_queue or removal_queue):
                entropy_client.output(
                    "%s." % (blue(_("Nothing to do")),),
                    level="warning", header=darkgreen(" @@ "))
                return 0, True

            self._show_install_queue(
                entropy_client, inst_repo,
                run_queue, removal_queue, ask, pretend, quiet, verbose)

            installed_pkg_sources = self._get_installed_packages_sources(
                entropy_client, inst_repo, run_queue)

        if ask:
            rc = entropy_client.ask_question(
                "     %s" % (_("Would you like to continue ?"),))
            if rc == _("No"):
                return 1, False

        if pretend:
            return 0, True # yes, tell user

        if self._interactive:
            exit_st = self._accept_license(
                entropy_client, inst_repo, run_queue)
            if exit_st != 0:
                return 1, False

        ugc_thread = None
        down_data = {}
        exit_st = self._download_packages(
            entropy_client, run_queue, down_data, multifetch)
        if exit_st == 0:
            ugc_thread = ParallelTask(
                self._signal_ugc, entropy_client, down_data)
            ugc_thread.name = "UgcThread"
            ugc_thread.start()

        else:
            return 1, False

        # is --fetch on? then quit.
        if fetch:
            if ugc_thread is not None:
                ugc_thread.join()
            entropy_client.output(
                "%s." % (
                    blue(_("Download complete")),),
                header=darkred(" @@ "))
            return 0, False

        notification_lock = UpdatesNotificationResourceLock(
            output=entropy_client)
        total = len(run_queue)

        notif_acquired = False
        try:
            # this is a best effort: we will not sleep if the lock
            # is not acquired, because we could get blocked for an
            # eternity (well, for a very long time) in this scenario:
            # 1. RigoDaemon is running some action queue
            # 2. Another thread in RigoDaemon is stuck on the activity
            #    mutex with the notification lock held.
            # 3. We cannot move on here because of 2.
            # Nothing bad will happen if we just ignore the acquisition
            # state.
            notif_acquired = notification_lock.try_acquire_shared()

            metaopts = {
                'removeconfig': config_files,
                # This can be used by PackageAction based classes
                # to know what's the overall package schedule for
                # both upgrade and install actions. This way, we
                # can better handle conflicts.
                'install_queue': run_queue,
            }

            for count, pkg_match in enumerate(run_queue, 1):

                source_id = installed_pkg_sources.get(pkg_match, None)

                if not onlydeps and pkg_match in packages_by_user:
                    metaopts['install_source'] = \
                        etpConst['install_sources']['user']
                elif source_id is not None:
                    # Retain the information. The install action can
                    # upgrade packages; their source should not be
                    # changed to automatic_dependency.
                    metaopts['install_source'] = source_id
                else:
                    metaopts['install_source'] = \
                        etpConst['install_sources']['automatic_dependency']

                package_id, repository_id = pkg_match
                atom = entropy_client.open_repository(
                    repository_id).retrieveAtom(package_id)

                pkg = None
                try:
                    pkg = action_factory.get(
                        action_factory.INSTALL_ACTION,
                        pkg_match, opts=metaopts)

                    xterm_header = "equo (%s) :: %d of %d ::" % (
                        _("install"), count, total)

                    pkg.set_xterm_header(xterm_header)

                    entropy_client.output(
                        purple(atom),
                        count=(count, total),
                        header=darkgreen(" +++ ") + ">>> ")

                    exit_st = pkg.start()
                    if exit_st != 0:
                        if ugc_thread is not None:
                            ugc_thread.join()
                        return 1, True

                finally:
                    if pkg is not None:
                        pkg.finalize()

        finally:
            if notif_acquired:
                notification_lock.release()

        if ugc_thread is not None:
            ugc_thread.join()

        entropy_client.output(
            "%s." % (
                blue(_("Installation complete")),),
            header=darkred(" @@ "))
        return 0, True
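
The notification lock handling above is a best-effort try/finally pattern: attempt a non-blocking acquire, proceed either way, and release only if the acquire succeeded. A minimal sketch, with threading.Lock standing in for UpdatesNotificationResourceLock (an assumption of this sketch; the real lock exposes try_acquire_shared()):

import threading

_notification_lock = threading.Lock()

def run_with_best_effort_lock(action):
    # never block on the lock: remember whether we actually got it,
    # run the action regardless, and release only what we acquired
    acquired = _notification_lock.acquire(blocking=False)
    try:
        return action()
    finally:
        if acquired:
            _notification_lock.release()

print(run_with_best_effort_lock(lambda: "queue processed"))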
Example 4
    def _install_action(self, entropy_client, deps, recursive,
                        pretend, ask, verbose, quiet, empty,
                        config_files, deep, fetch, bdeps,
                        onlydeps, relaxed, multifetch, packages,
                        package_matches=None):
        """
        Solo Install action implementation.
        """
        inst_repo = entropy_client.installed_repository()
        action_factory = entropy_client.PackageActionFactory()

        with inst_repo.shared():

            self._advise_repository_update(entropy_client)
            if self._check_critical_updates:
                self._advise_packages_update(entropy_client)

            if package_matches is None:
                packages = self._scan_packages(
                    entropy_client, packages)
                if not packages:
                    entropy_client.output(
                        "%s." % (
                            darkred(_("No packages found")),),
                        level="error", importance=1)
                    return 1, False
            else:
                packages = package_matches

            run_queue, removal_queue = self._generate_install_queue(
                entropy_client, packages, deps, empty, deep, relaxed,
                onlydeps, bdeps, recursive)
            if (run_queue is None) or (removal_queue is None):
                return 1, False
            elif not (run_queue or removal_queue):
                entropy_client.output(
                    "%s." % (blue(_("Nothing to do")),),
                    level="warning", header=darkgreen(" @@ "))
                return 0, True

            self._show_install_queue(
                entropy_client, inst_repo,
                run_queue, removal_queue, ask, pretend, quiet, verbose)

        if ask:
            rc = entropy_client.ask_question(
                "     %s" % (_("Would you like to continue ?"),))
            if rc == _("No"):
                return 1, False

        if pretend:
            return 0, True # yes, tell user

        if self._interactive:
            exit_st = self._accept_license(
                entropy_client, inst_repo, run_queue)
            if exit_st != 0:
                return 1, False

        ugc_thread = None
        down_data = {}
        exit_st = self._download_packages(
            entropy_client, run_queue, down_data, multifetch)
        if exit_st == 0:
            ugc_thread = ParallelTask(
                self._signal_ugc, entropy_client, down_data)
            ugc_thread.name = "UgcThread"
            ugc_thread.start()

        else:
            return 1, False

        # is --fetch on? then quit.
        if fetch:
            if ugc_thread is not None:
                ugc_thread.join()
            entropy_client.output(
                "%s." % (
                    blue(_("Download complete")),),
                header=darkred(" @@ "))
            return 0, False

        package_set = set(packages)
        total = len(run_queue)
        for count, pkg_match in enumerate(run_queue, 1):

            metaopts = {
                'removeconfig': config_files,
            }

            if onlydeps:
                metaopts['install_source'] = \
                    etpConst['install_sources']['automatic_dependency']
            elif pkg_match in package_set:
                metaopts['install_source'] = \
                    etpConst['install_sources']['user']
            else:
                metaopts['install_source'] = \
                    etpConst['install_sources']['automatic_dependency']

            package_id, repository_id = pkg_match
            atom = entropy_client.open_repository(
                repository_id).retrieveAtom(package_id)

            pkg = None
            try:
                pkg = action_factory.get(
                    action_factory.INSTALL_ACTION,
                    pkg_match, opts=metaopts)

                xterm_header = "equo (%s) :: %d of %d ::" % (
                    _("install"), count, total)

                pkg.set_xterm_header(xterm_header)

                entropy_client.output(
                    purple(atom),
                    count=(count, total),
                    header=darkgreen(" +++ ") + ">>> ")

                exit_st = pkg.start()
                if exit_st != 0:
                    if ugc_thread is not None:
                        ugc_thread.join()
                    return 1, True

            finally:
                if pkg is not None:
                    pkg.finalize()

        if ugc_thread is not None:
            ugc_thread.join()

        entropy_client.output(
            "%s." % (
                blue(_("Installation complete")),),
            header=darkred(" @@ "))
        return 0, True
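
The install_source bookkeeping in this variant reduces to a small, pure decision: with --onlydeps everything is recorded as an automatic dependency, otherwise only explicitly requested packages are marked as user-installed (Example 3 adds a third branch that preserves the previously recorded source on upgrade). A minimal sketch, with hypothetical constants standing in for the etpConst['install_sources'] values:

# hypothetical stand-ins for etpConst['install_sources'] values
INSTALL_SOURCE_USER = 1
INSTALL_SOURCE_AUTOMATIC_DEPENDENCY = 2

def pick_install_source(pkg_match, user_requested, onlydeps=False):
    # --onlydeps installs record everything as a dependency
    if onlydeps:
        return INSTALL_SOURCE_AUTOMATIC_DEPENDENCY
    # packages the user explicitly asked for keep the "user" source
    if pkg_match in user_requested:
        return INSTALL_SOURCE_USER
    return INSTALL_SOURCE_AUTOMATIC_DEPENDENCY

requested = {(1234, "limbo")}
print(pick_install_source((1234, "limbo"), requested))    # 1
print(pick_install_source((5678, "limbo"), requested))    # 2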