Example #1
async def chat(request_data: TextData,
               current_user: User = Depends(auth.get_current_user)):
    """
    Fetches a bot response for a given text/query.
    It is basically used to test the chat functionality of the agent
    """
    model = AgentProcessor.get_agent(current_user.get_bot())
    response = await model.handle_text(request_data.data,
                                       sender_id=current_user.get_user())
    return {"data": {"response": response}}
Example #2
def start_training(bot: str, user: str, token: str = None, reload=True):
    """
    prevents training of the bot,
    if the training session is in progress otherwise start training

    :param reload: whether to reload model in the cache
    :param bot: bot id
    :param token: JWT token for remote model reload
    :param user: user id
    :return: model path
    """
    exception = None
    model_file = None
    training_status = None
    if Utility.environment.get('model') and Utility.environment['model'][
            'train'].get('event_url'):
        Utility.train_model_event(bot, user, token)
    else:
        try:
            model_file = train_model_for_bot(bot)
            training_status = MODEL_TRAINING_STATUS.DONE.value
            agent_url = Utility.environment['model']['train'].get('agent_url')
            if agent_url:
                Utility.http_request(
                    'get', urljoin(agent_url, "/api/bot/model/reload"), token,
                    user)
            else:
                if reload:
                    AgentProcessor.reload(bot)
        except Exception as e:
            logging.exception(e)
            training_status = MODEL_TRAINING_STATUS.FAIL.value
            exception = str(e)
        finally:
            ModelProcessor.set_training_status(
                bot=bot,
                user=user,
                status=training_status,
                model_path=model_file,
                exception=exception,
            )
    return model_file
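
The branching at the top of this function keys off two optional settings under Utility.environment['model']['train']. A self-contained sketch of that configuration shape follows; the URLs are placeholders, not kairon defaults.

# Placeholder configuration illustrating the keys start_training checks.
environment = {
    "model": {
        "train": {
            # if set, training is delegated to an external event service
            "event_url": "http://localhost:5056/api/events/train",
            # if set, the trained model is reloaded via a remote agent
            "agent_url": "http://localhost:5057/",
        }
    }
}

if environment.get("model") and environment["model"]["train"].get("event_url"):
    print("training dispatched to", environment["model"]["train"]["event_url"])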
Example #3
async def predict_intent(request_data: TextData,
                         current_user: User = Depends(auth.get_current_user)):
    """
    Fetches the predicted intent of the entered text from the loaded agent
    """
    model = AgentProcessor.get_agent(current_user.get_bot())
    response = await model.parse_message_using_nlu_interpreter(
        request_data.data)
    intent = response.get("intent").get("name") if response else None
    confidence = response.get("intent").get("confidence") if response else None
    return {"data": {"intent": intent, "confidence": confidence}}
Example #4
def start_training(bot: str, user: str, reload=True):
    """
    prevents training of the bot,
    if the training session is in progress otherwise start training

    :param reload: whether to reload model in the cache
    :param bot: bot id
    :param user: user id
    :return: model path
    """
    exception = None
    model_file = None
    training_status = None

    ModelProcessor.set_training_status(
        bot=bot, user=user, status=MODEL_TRAINING_STATUS.INPROGRESS.value,
    )
    try:
        model_file = train_model_for_bot(bot)
        training_status = MODEL_TRAINING_STATUS.DONE.value
    except Exception as e:
        logging.exception(e)
        training_status = MODEL_TRAINING_STATUS.FAIL.value
        exception = str(e)
        raise AppException(exception)
    finally:
        ModelProcessor.set_training_status(
            bot=bot,
            user=user,
            status=training_status,
            model_path=model_file,
            exception=exception,
        )
    if reload:
        AgentProcessor.reload(bot)
    return model_file
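
Because this variant marks the status INPROGRESS before a potentially long training run and re-raises failures as AppException, a caller would normally push it off the request path. A hedged sketch of such a dispatch using FastAPI's BackgroundTasks; the router object and route path are assumptions, not the actual kairon endpoint.

from fastapi import APIRouter, BackgroundTasks, Depends

router = APIRouter()  # hypothetical router; the real service wires its own


@router.post("/train")  # assumed path
async def train(background_tasks: BackgroundTasks,
                current_user: User = Depends(auth.get_current_user)):
    # start_training (as defined above) records INPROGRESS/DONE/FAIL via
    # ModelProcessor and reloads the agent cache on success.
    background_tasks.add_task(start_training,
                              current_user.get_bot(),
                              current_user.get_user())
    return {"message": "Model training started."}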
Example #5
async def chat(
        request_data: TextData, current_user: User = Depends(auth.get_current_user)
):
    """
    Fetches a bot response for a given text/query.
    It is basically used to test the chat functionality of the agent
    """
    if Utility.environment.get('model') and Utility.environment['model'][
            'train'].get('agent_url'):
        agent_url = Utility.environment['model']['train'].get('agent_url')
        token = auth.create_access_token(data={"sub": current_user.email})
        response = Utility.http_request(
            'post', urljoin(agent_url, "/api/bot/chat"),
            token.decode('utf8'), current_user.get_user(),
            json={'data': request_data.data})
    else:
        model = AgentProcessor.get_agent(current_user.get_bot())
        response = await model.handle_text(
            request_data.data, sender_id=current_user.get_user()
        )
        response = {"data": {"response": response}}
    return response
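
One subtlety in the proxy branch: urljoin with an absolute path such as "/api/bot/chat" keeps only the scheme, host, and port of agent_url and discards any path it already carries. A self-contained sketch (host values are illustrative):

from urllib.parse import urljoin

# urljoin replaces the path when the second argument starts with "/".
print(urljoin("http://localhost:5057/", "/api/bot/chat"))
# -> http://localhost:5057/api/bot/chat
print(urljoin("http://localhost:5057/some/prefix", "/api/bot/chat"))
# -> http://localhost:5057/api/bot/chat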
Example #6
async def download_model(
        background_tasks: BackgroundTasks,
        current_user: User = Depends(auth.get_current_user),
):
    """
    Downloads the latest trained model file
    """
    try:
        model_path = AgentProcessor.get_latest_model(current_user.get_bot())
        response = FileResponse(
            model_path,
            filename=os.path.basename(model_path),
            background=background_tasks,
        )
        response.headers["Content-Disposition"] = (
            "attachment; filename=" + os.path.basename(model_path))
        return response
    except Exception as e:
        raise AppException(str(e))
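
For reference, a self-contained sketch of the Content-Disposition header this endpoint composes; the model path is illustrative.

import os

model_path = "/app/models/model_20210101-123456.tar.gz"  # illustrative path
print("attachment; filename=" + os.path.basename(model_path))
# -> attachment; filename=model_20210101-123456.tar.gz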