Example 1
def add_processing_files():
    """
    send the client information about every File that is currently being processed
    """
    files = FileRepository.get_processing_query().all()

    for file in files:
        # emit the file_started event for *every* file that is currently being processed
        socketio.emit("file_started", {"data": formatted_file_data(file)})
Example 2
    def file_progress(file):
        """
        called whenever a File makes progress
        :param file: the File that has made progress
        """

        # format data
        info = formatted_file_data(file)

        socketio.emit("file_progress", {"data": info})
Example 3
    def cancel_process(file_id):
        """
        cancel a specific Process
        :param file_id: the id of the File corresponding to the Process
        """

        # stop thread
        ProcessRepository.processes[file_id].stop()
        # update status
        file = File.query.filter_by(id=file_id).first()
        file.status = StatusMap.failed.value
        file.clear()
        db.session.commit()

        # emit file_done event
        socketio.emit("file_done", {"data": formatted_file_data(file)})

        # remove from processes dict
        ProcessRepository.processes.pop(file_id)
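cancel_process is a natural target for a client-triggered event. A server-side handler along the following lines could forward a cancel request from the browser to this method; the "cancel_file" event name and the payload key are assumptions for illustration, and the handler assumes the module's socketio instance and ProcessRepository are in scope.

@socketio.on("cancel_file")
def handle_cancel_file(message):
    # hypothetical wiring: the client sends {"file_id": <id>} to request a cancel
    file_id = message["file_id"]
    if file_id in ProcessRepository.processes:
        ProcessRepository.cancel_process(file_id)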
Example 4
    def check_and_start_processes():
        """
        check whether new Processes need to be started and start them if needed
        """

        while ProcessRepository.encoding_active:
            # grab next potential file to process
            file = FileRepository.get_queued_query().order_by(
                Package.position.asc(), File.position.asc()).first()

            # stop when the queue is empty or the configured parallelism limit is reached
            if (file is None or ProcessRepository.count_processes_active()
                    >= config["general"].getint("parallel_processes")):
                break

            # update file.status in DB
            file.status = StatusMap.processing.value
            db.session.commit()

            # start the Process
            from app.modules.mod_process.process import Process
            process = Process(file)
            process.daemon = True

            # todo debug
            # file.status = 0
            # db.session.commit()
            # ProcessRepository.encoding_active = False

            # add to "processes" dict
            ProcessRepository.processes[file.id] = process

            process.start()

            # emit file_started event
            data = formatted_file_data(file)
            data["count_active"] = ProcessRepository.count_processes_active()
            data["count_queued"] = ProcessRepository.count_processes_queued()
            socketio.emit("file_started", {"data": data})