Example #1
def add_processing_files():
    """
    send the client information about all currently processing Files
    """
    files = FileRepository.get_processing_query().all()

    for file in files:
        # emit the file_started event for *every* file that is currently being processed
        socketio.emit("file_started", {"data": formatted_file_data(file)})
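
The socketio.emit("file_started", ...) call above is the Flask-SocketIO server side of the exchange. As a rough sketch of the receiving end, assuming a python-socketio client and a server listening on http://localhost:5000 (both the client library choice and the address are assumptions, not shown in the project):

import socketio  # python-socketio client package; unrelated to the server-side "socketio" object above

sio = socketio.Client()

@sio.on("file_started")
def on_file_started(message):
    # "data" carries whatever formatted_file_data() produced on the server
    print("processing started:", message["data"])

sio.connect("http://localhost:5000")  # hypothetical server address
sio.wait()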
Example #2
    def check_and_start_processes():
        """
        check whether new Processes need to be started and start them if so
        """

        while ProcessRepository.encoding_active:
            # grab next potential file to process
            file = FileRepository.get_queued_query().order_by(
                Package.position.asc(), File.position.asc()).first()

            if file is None or ProcessRepository.count_processes_active() >= config["general"].getint(
                    "parallel_processes"):
                break

            # update file.status in DB
            file.status = StatusMap.processing.value
            db.session.commit()

            # start the Process
            from app.modules.mod_process.process import Process
            process = Process(file)
            process.daemon = True

            # todo debug
            # file.status = 0
            # db.session.commit()
            # ProcessRepository.encoding_active = False

            # add to "processes" dict
            ProcessRepository.processes[file.id] = process

            process.start()

            # emit file_started event
            data = formatted_file_data(file)
            data["count_active"] = ProcessRepository.count_processes_active()
            data["count_queued"] = ProcessRepository.count_processes_queued()
            socketio.emit("file_started", {"data": data})
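
This method relies on a few names the snippet does not define: StatusMap (status codes written to file.status) and Process (a daemonizable worker started per file). A minimal sketch of what they might look like, assuming a plain Enum for the status codes and a threading.Thread subclass; the project's real definitions may differ:

import threading
from enum import Enum


class StatusMap(Enum):
    # hypothetical integer codes persisted in File.status
    queued = 0
    processing = 1
    finished = 2
    failed = 3


class Process(threading.Thread):
    """Hypothetical worker thread that encodes a single File."""

    def __init__(self, file):
        super().__init__()
        self.file = file

    def run(self):
        # the actual encoding work for self.file would happen here
        pass

Setting process.daemon = True before process.start(), as the method above does, marks the worker as a daemon thread so it will not keep the interpreter alive on shutdown.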
Example #3
    def count_processes_queued():
        """
        :return: the number of Files currently queued
        """

        return FileRepository.get_queued_query().count()
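
count_processes_queued() only counts the query built by FileRepository.get_queued_query(), which is not shown in these examples. A minimal, self-contained sketch of such a repository using plain SQLAlchemy and made-up status codes (the real project presumably has its own File model, StatusMap values, and Flask-SQLAlchemy session):

from sqlalchemy import Column, Integer, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()


class File(Base):
    __tablename__ = "files"
    id = Column(Integer, primary_key=True)
    status = Column(Integer, default=0)    # 0 = queued, 1 = processing (assumed codes)
    position = Column(Integer, default=0)


engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()


class FileRepository:
    @staticmethod
    def get_queued_query():
        # Files still waiting to be processed
        return session.query(File).filter_by(status=0)

    @staticmethod
    def get_processing_query():
        # Files currently being encoded
        return session.query(File).filter_by(status=1)


print(FileRepository.get_queued_query().count())  # 0 against the empty in-memory database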