Code Example #1
File: handler.py Project: tridentstream/mediaserver
    def __init__(self, config):
        self.config = config
        self.listing_builder = ListingBuilder(self)
        self.automatic_rebuild_lock = threading.Lock()

        if self.can_automatically_rebuild:
            # TODO: should run after bootstrap
            threadify(self.schedule_automatic_rebuild, delay=3)()
            threadify(self.rebuild_listings, delay=30)()
Code Example #2
File: searcher.py Project: tridentstream/mediaserver
    def filters_multiple(plugins):
        threads = []
        for plugin in plugins:

            def get_filters(plugin):
                return plugin.filters

            threads.append((plugin, threadify(get_filters,
                                              cache_result=True)(plugin)))

        retval = None
        for plugin, thread in threads:
            filters = thread()
            if not filters:
                continue

            if retval is None:
                retval = filters
            else:
                retval.merge(filters)

        return retval
Code Example #3
File: handler.py Project: tridentstream/mediaserver
    def rebuild_listing(self, config, path, delay=False):
        threadify(self.listing_builder.get_listing, delay=delay and 3.0 or 0)(
            config, path, use_background_recheck=False
        )
Code Example #4
    def update_request(self, *args):
        threadify(self.update)()
Code Example #5
File: handler.py Project: tridentstream/mediaserver
    def _rescan(self, update_all_metadata=False):
        """
        Rescans filesystem for files
        """
        if self.is_rescanning:
            logger.warning("Already rescanning")
            return None

        notification = Notification(
            f"admin.{self.plugin_name}.{self.name}.rescan",
            "info",
            f"Filesystem {self.name}",
            "Started to rescan",
            permission_type="is_admin",
        )
        notification_start_dt = datetime.now()
        if self.notifier:
            self.notifier.notify(notification)

        with Log.objects.start_chain(self, "INPUT.RESCAN") as log:
            log.log(
                0,
                f"A rescan is started and update_all_metadata {update_all_metadata}"
            )

            self.is_rescanning = True
            self.rescan_done = defer.Deferred()

            logger.info("Rescanning")
            queue = Queue(20)

            class QueueCommand:
                INSERT = 0
                ENSURE_PREFIXES = 1
                DONE = 2

            def walk_path(queue, prefix, path):
                logger.info(
                    f"Starting to scan {path!r} with prefix {prefix!r}")
                list_queue = []
                queue_size = 0

                for r, dirs, files in os.walk(path, followlinks=True):
                    useful_files = []
                    for f in files:
                        full_path = os.path.join(r, f)
                        if not os.path.exists(full_path):
                            logger.debug(f"We found broken link: {full_path}")
                            continue

                        useful_files.append((f, os.path.getsize(full_path)))

                    list_queue.append(
                        (r[len(path):].strip("/"), dirs, useful_files))
                    queue_size += len(dirs) + len(files)

                    if queue_size >= COMMIT_COUNTER:
                        queue.put(
                            (QueueCommand.INSERT, path, prefix, list_queue))
                        list_queue = []
                        queue_size = 0

                    if self.should_die:
                        logger.info(f"Got the death in walker for {path}")
                        return

                if list_queue:
                    queue.put((QueueCommand.INSERT, path, prefix, list_queue))

                queue.put((QueueCommand.DONE, path))

                logger.info(f"Done scanning {path!r}")

            def insert_into_vfs(vfs, queue, path_count):
                with vfs.session(True, always_trigger_new=update_all_metadata):
                    while path_count:
                        job = queue.get(True)
                        cmd = job[0]

                        if cmd == QueueCommand.INSERT:
                            logger.debug("Got insertion job")
                            _, path, prefix, items = job

                            for root, folders, files in items:
                                virtual_path = [prefix] + root.split(os.sep)

                                for item in folders:
                                    vp = "/".join([
                                        x for x in virtual_path + [item] if x
                                    ])
                                    vfs.add_dir(vp, int(time.time()))

                                for item, size in files:
                                    vp = "/".join([
                                        x for x in virtual_path + [item] if x
                                    ])
                                    actual_path = os.path.join(
                                        path, root, item)

                                    vfs.add_file(
                                        vp,
                                        size,
                                        int(time.time()),
                                        {"_actual_path": actual_path},
                                    )

                        elif cmd == QueueCommand.ENSURE_PREFIXES:
                            _, prefixes = job

                            for prefix in prefixes:
                                path = []
                                for p in prefix.strip("/").split("/"):
                                    path.append(p)
                                    vfs.add_dir("/".join(path),
                                                int(time.time()))
                        elif cmd == QueueCommand.DONE:
                            _, path = job
                            logger.info(f"Done inserting {path!r}")
                            path_count -= 1
                        else:
                            logger.error(f"Unknown command {job!r}")

                        if self.should_die:
                            logger.info(
                                f"Got the death in inserter for {path}")
                            break

                self.last_update = datetime.now()
                self.is_rescanning = False
                reactor.callFromThread(self.rescan_done.callback, None)
                self.rescan_done.callback = None

                logger.info("Done scanning all paths.")

            t = threadify(insert_into_vfs, cache_result=True)(self.vfs, queue,
                                                              len(self.paths))

            prefixes = set(p[0] for p in self.paths if p[0])
            if prefixes:
                queue.put((QueueCommand.ENSURE_PREFIXES, prefixes))

            for virtual_path, path in self.paths:
                threadify(walk_path)(queue, virtual_path, path)

            t()

            delta = datetime.now() - notification_start_dt
            if self.notifier:
                notification = notification.copy(
                    body=f"Finished rescanning, it took {delta}")
                self.notifier.notify(notification)

            log.log(100, f"A rescan finished after {delta}")
Code Example #6
File: handler.py Project: tridentstream/mediaserver
    def rescan(self, update_all_metadata=False):
        threadify(self._rescan)(update_all_metadata)
        return "Rescanning"