Example No. 1
def recognize_batch():
    data = {
        "model": request.args.get("lang", "en-GB"),
        "lm": request.args.get("lm", "default"),
        "wav": request.data
    }

    def generate(response):
        for result in response:
            yield json.dumps(result, ensure_ascii=False)

    worker = None

    try:
        worker = create_frontend_worker(os.environ['MASTER_ADDR'])
        response = worker.recognize_batch(data, request.headers)

        return Response(stream_with_context(generate(response)))
    except MissingHeaderError:
        return jsonify({
            "status": "error",
            "message": "Missing header Content-Type"
        }), 400
    except NoWorkerAvailableError:
        return jsonify({
            "status": "error",
            "message": "No worker available"
        }), 503
    except WorkerInternalError:
        return jsonify({
            "status": "error",
            "message": "Input file corrupted"
        }), 400
    finally:
        # Close the connection exactly once, and only if the worker was actually created.
        if worker is not None:
            worker.close()
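The handler above is shown without its imports or route registration. Below is a minimal sketch of the setup it appears to rely on; the module path for create_frontend_worker and the error classes is a placeholder, since the examples never show it.

import json
import os

from flask import Flask, Response, jsonify, request, stream_with_context

# Placeholder module path: the examples show only the names, not the module
# that provides create_frontend_worker and the error classes.
from cloudasr_frontend import (
    MissingHeaderError,
    NoWorkerAvailableError,
    WorkerInternalError,
    create_frontend_worker,
)

app = Flask(__name__)

# The examples do not show how the handler is registered; a plain
# app.route("/recognize", methods=["POST"]) decorator would be one option.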
Example No. 2
def begin_online_recognition(message):
    try:
        worker = create_frontend_worker(os.environ['MASTER_ADDR'])
        worker.connect_to_worker(message["model"])

        session["worker"] = worker
    except NoWorkerAvailableError:
        emit('server_error', {"status": "error", "message": "No worker available"})
Example No. 3
def begin_online_recognition(message):
    worker = None

    try:
        worker = create_frontend_worker(os.environ['MASTER_ADDR'])
        worker.connect_to_worker(message["model"])

        session["worker"] = worker
        session["connected"] = True
    except NoWorkerAvailableError:
        emit('server_error', {"status": "error", "message": "No worker available"})

        # Close the connection only if the worker was created before the error.
        if worker is not None:
            worker.close()
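The use of emit and session in the two begin_online_recognition handlers suggests they are Flask-SocketIO event handlers. A minimal registration sketch follows; the event name, secret key, and framework choice are assumptions, not shown in the examples.

from flask import Flask, session            # session is what the handlers above store the worker in
from flask_socketio import SocketIO, emit   # emit is what they use to report errors to the client

app = Flask(__name__)
app.config["SECRET_KEY"] = "change-me"       # Flask sessions require a secret key
socketio = SocketIO(app)

@socketio.on("begin_online_recognition")     # the event name is an assumption
def begin_online_recognition(message):
    # Handler body as in Example No. 3 above: create the frontend worker,
    # connect it to the requested model, and keep it in the session.
    ...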
Example No. 4
def recognize_batch():
    data = {
        "model": request.args.get("lang", "en-GB"),
        "wav": request.data
    }

    try:
        worker = create_frontend_worker(os.environ['MASTER_ADDR'])
        return jsonify(worker.recognize_batch(data, request.headers))
    except MissingHeaderError:
        return jsonify({"status": "error", "message": "Missing header Content-Type"}), 400
    except NoWorkerAvailableError:
        return jsonify({"status": "error", "message": "No worker available"}), 503
Example No. 5
def recognize_batch():
    data = {"model": request.args.get("lang", "en-GB"), "wav": request.data}

    try:
        worker = create_frontend_worker(os.environ['MASTER_ADDR'])
        return jsonify(worker.recognize_batch(data, request.headers))
    except MissingHeaderError:
        return jsonify({
            "status": "error",
            "message": "Missing header Content-Type"
        }), 400
    except NoWorkerAvailableError:
        return jsonify({
            "status": "error",
            "message": "No worker available"
        }), 503
Example No. 6
def recognize_batch():
    data = {
        "model": request.args.get("lang", "en-GB"),
        "lm": request.args.get("lm", "default"),
        "wav": request.data
    }

    def generate(response):
        for result in response:
            yield json.dumps(result)

    worker = None

    try:
        worker = create_frontend_worker(os.environ['MASTER_ADDR'])
        response = worker.recognize_batch(data, request.headers)

        return Response(stream_with_context(generate(response)))
    except MissingHeaderError:
        return jsonify({"status": "error", "message": "Missing header Content-Type"}), 400
    except NoWorkerAvailableError:
        return jsonify({"status": "error", "message": "No worker available"}), 503
    finally:
        # Close the connection exactly once, and only if the worker was actually created.
        if worker is not None:
            worker.close()
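The batch handlers read the raw request body as WAV data, take the language (and optionally the language model) from query parameters, and reject requests without a Content-Type header. A minimal client sketch using requests; the host, port, route, and header value are assumptions.

import requests

# Host, port, and route are assumptions; the examples only define the handler.
URL = "http://localhost:8000/recognize"

with open("speech.wav", "rb") as f:
    wav = f.read()

response = requests.post(
    URL,
    params={"lang": "en-GB", "lm": "default"},  # query parameters read via request.args
    headers={"Content-Type": "audio/x-wav"},    # a Content-Type header is required; the expected value is not shown
    data=wav,                                   # the raw body becomes request.data
)

print(response.status_code)
print(response.text)  # JSON result(s); Examples No. 1 and 6 stream one JSON object per result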
Example No. 7
    def test_can_create_frontend_worker(self):
        worker = create_frontend_worker("ipc:///tmp/worker")
        self.assertIsInstance(worker, FrontendWorker)
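The self parameter and indentation indicate that this is a method of a test class. A minimal harness sketch using the standard unittest framework; the class name and import path are assumptions.

import unittest

# Placeholder module path, as in the earlier sketches.
from cloudasr_frontend import FrontendWorker, create_frontend_worker


class TestCreateFrontendWorker(unittest.TestCase):

    def test_can_create_frontend_worker(self):
        worker = create_frontend_worker("ipc:///tmp/worker")
        self.assertIsInstance(worker, FrontendWorker)


if __name__ == "__main__":
    unittest.main()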