Example #1
def interact(model: Chainer, payload: Dict[str, Optional[List]]) -> List:
    model_args = payload.values()
    dialog_logger.log_in(payload)
    error_msg = None
    # Collect the distinct batch sizes of all provided (non-None) arguments.
    lengths = {
        len(model_arg)
        for model_arg in model_args if model_arg is not None
    }

    if not lengths:
        error_msg = 'got empty request'
    elif 0 in lengths:
        error_msg = 'got empty array as model argument'
    elif len(lengths) > 1:
        error_msg = 'got several different batch sizes'

    if error_msg is not None:
        log.error(error_msg)
        raise HTTPException(status_code=400, detail=error_msg)

    batch_size = next(iter(lengths))
    # Absent (None) arguments become batches of None placeholders.
    model_args = [arg or [None] * batch_size for arg in model_args]

    prediction = model(*model_args)
    if len(model.out_params) == 1:
        prediction = [prediction]
    # Transpose per-output batches into one tuple of outputs per sample.
    prediction = list(zip(*prediction))
    result = jsonify_data(prediction)
    dialog_logger.log_out(result)
    return result
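
The validation and padding steps above can be exercised in isolation. A minimal sketch, assuming a hypothetical three-input model with argument names `context`, `question` and `extra`:

from typing import Dict, List, Optional

# Hypothetical payload for a three-input model; 'extra' was not supplied.
payload: Dict[str, Optional[List]] = {
    'context': ['Paris is the capital of France.'],
    'question': ['What is the capital of France?'],
    'extra': None,
}
# Same checks as interact(): exactly one nonzero batch size must remain.
lengths = {len(arg) for arg in payload.values() if arg is not None}
assert lengths and 0 not in lengths and len(lengths) == 1

batch_size = next(iter(lengths))
# Absent arguments become batches of None placeholders.
model_args = [arg or [None] * batch_size for arg in payload.values()]
print(model_args)  # [['Paris is ...'], ['What is ...'], [None]]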
Example #2
    def _log(self, utterance: Any, direction: str, dialog_id: Optional[Hashable] = None):
        """Logs single dialog utterance to current dialog log file.

        Args:
            utterance: Dialog utterance.
            direction: 'in' or 'out' utterance direction.
            dialog_id: Dialog ID.
        """
        if isinstance(utterance, str):
            pass  # plain strings are logged as-is
        elif isinstance(utterance, (list, dict)):
            utterance = jsonify_data(utterance)
        else:
            utterance = str(utterance)

        dialog_id = str(dialog_id) if not isinstance(dialog_id, str) else dialog_id

        # Rotate the log file once it exceeds the size limit, then write the entry.
        if self.log_file.tell() >= self.log_max_size * 1024:
            self.log_file.close()
            self.log_file = self._get_log_file()

        try:
            log_msg = {
                'timestamp': self._get_timestamp_utc_str(),
                'dialog_id': dialog_id,
                'direction': direction,
                'message': utterance,
            }
            log_str = json.dumps(log_msg, ensure_ascii=self.config['ensure_ascii'])
            self.log_file.write(f'{log_str}\n')
        except IOError:
            log.error('Failed to write dialog log.')
Example #3
def interact(model, params_names):
    if not request.is_json:
        return jsonify({
            "error": "request must contains json data"
        }), 400

    model_args = []

    data = request.get_json()
    for param_name in params_names:
        param_value = data.get(param_name)
        if param_value is None or (isinstance(param_value, list) and len(param_value) > 0):
            model_args.append(param_value)
        else:
            return jsonify({'error': f"nonempty array expected but got '{param_name}'={repr(param_value)}"}), 400

    lengths = {len(i) for i in model_args if i is not None}

    if not lengths:
        return jsonify({'error': 'got empty request'}), 400
    elif len(lengths) > 1:
        return jsonify({'error': 'got several different batch sizes'}), 400

    if len(params_names) == 1:
        model_args = model_args[0]
    else:
        batch_size = next(iter(lengths))
        model_args = [arg or [None] * batch_size for arg in model_args]
        model_args = list(zip(*model_args))

    prediction = model(model_args)
    result = jsonify_data(prediction)
    return jsonify(result), 200
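
In the multi-argument branch, `zip(*model_args)` transposes the column-wise argument batches into one tuple per sample; a quick sketch with hypothetical two-argument batches:

# One list per model argument, each of batch size 2.
model_args = [['context one', 'context two'], ['question one', 'question two']]
rows = list(zip(*model_args))
print(rows)  # [('context one', 'question one'), ('context two', 'question two')]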
Example #4
def encode(data: Any) -> bytes:
    """Сonverts data to the socket server input formatted bytes array.

    Serializes ``data`` to a JSON formatted bytes array and prepends a 4-byte header containing the length
    of that array, packed to bytes. Header format is "<I"
    (see https://docs.python.org/3/library/struct.html#struct-format-strings)

    Args:
        data: Object to pack into the bytes array.

    Raises:
        TypeError: If ``data`` is not a JSON-serializable object.

    Examples:
        >>> from deeppavlov.utils.socket import encode
        >>> encode({'a':1})
        b'\x08\x00\x00\x00{"a": 1}'
        >>> encode([42])
        b'\x04\x00\x00\x00[42]'

    """
    json_data = jsonify_data(data)
    bytes_data = json.dumps(json_data).encode()
    response = pack(HEADER_FORMAT, len(bytes_data)) + bytes_data
    return response
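
The snippet does not show the inverse operation; a minimal `decode` sketch, assuming `HEADER_FORMAT` is the `'<I'` constant the docstring refers to:

import json
from struct import calcsize, unpack

HEADER_FORMAT = '<I'  # assumption: matches the module-level constant used by encode()
HEADER_LEN = calcsize(HEADER_FORMAT)  # 4 bytes

def decode(raw: bytes):
    """Reads the 4-byte length header, then parses that many JSON bytes."""
    (body_len,) = unpack(HEADER_FORMAT, raw[:HEADER_LEN])
    return json.loads(raw[HEADER_LEN:HEADER_LEN + body_len])

assert decode(b'\x04\x00\x00\x00[42]') == [42]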
Example #5
def predict_on_stream(config: Union[str, Path, dict],
                      batch_size: Optional[int] = None,
                      file_path: Optional[str] = None) -> None:
    """Make a prediction with the component described in corresponding configuration file."""

    batch_size = batch_size or 1
    if file_path is None or file_path == '-':
        if sys.stdin.isatty():
            raise RuntimeError('To process data from terminal please use interact mode')
        f = sys.stdin
    else:
        f = open(file_path, encoding='utf8')

    model: Chainer = build_model(config)

    args_count = len(model.in_x)
    while True:
        batch = [line.strip() for line in islice(f, batch_size * args_count)]

        if not batch:
            break

        # De-interleave the lines: line i belongs to model input i % args_count.
        args = [batch[i::args_count] for i in range(args_count)]

        res = model(*args)
        if len(model.out_params) == 1:
            res = [res]
        for row in zip(*res):
            print(json.dumps(jsonify_data(row), ensure_ascii=False), flush=True)

    if f is not sys.stdin:
        f.close()
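
The `batch[i::args_count]` slicing de-interleaves the input lines, so consecutive lines feed different model inputs; a sketch for a hypothetical two-input model:

# Four stdin lines for a two-input model, batch_size == 2:
batch = ['context 1', 'question 1', 'context 2', 'question 2']
args_count = 2
args = [batch[i::args_count] for i in range(args_count)]
print(args)  # [['context 1', 'context 2'], ['question 1', 'question 2']]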
Example #6
    def _log(self, utterance: Any, direction: str, dialog_id: Optional[Hashable] = None):
        """Logs single dialog utterance to current dialog log file.

        Args:
            utterance: Dialog utterance.
            direction: 'in' or 'out' utterance direction.
            dialog_id: Dialog ID.
        """
        if isinstance(utterance, str):
            pass  # plain strings are logged as-is
        elif isinstance(utterance, RichMessage):
            utterance = utterance.json()
        elif isinstance(utterance, (list, dict)):
            utterance = jsonify_data(utterance)
        else:
            utterance = str(utterance)

        dialog_id = str(dialog_id) if not isinstance(dialog_id, str) else dialog_id

        # Rotate the log file once it exceeds the size limit, then write the entry.
        if self.log_file.tell() >= self.log_max_size * 1024:
            self.log_file.close()
            self.log_file = self._get_log_file()

        try:
            log_msg = {
                'timestamp': self._get_timestamp_utc_str(),
                'dialog_id': dialog_id,
                'direction': direction,
                'message': utterance,
            }
            log_str = json.dumps(log_msg, ensure_ascii=self.config['ensure_ascii'])
            self.log_file.write(f'{log_str}\n')
        except IOError:
            log.error('Failed to write dialog log.')
Example #7
    async def _response(status: str, payload: Optional[List[Tuple]]) -> bytes:
        """Puts arguments into dict and serialize it to JSON formatted byte array.

        Args:
            status: Response status. 'OK' if no error has occurred, otherwise error message.
            payload: DeepPavlov model result if no error has occurred, otherwise None.

        Returns:
            dict({'status': status, 'payload': payload}) serialized to a JSON formatted byte array.

        """
        resp_dict = jsonify_data({'status': status, 'payload': payload})
        resp_str = json.dumps(resp_dict)
        return resp_str.encode('utf-8')
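
A sketch of the bytes a successful call would produce, assuming `jsonify_data` passes plain strings and lists through unchanged:

import json

# Equivalent of _response('OK', [('answer',)]): tuples serialize as JSON arrays.
body = json.dumps({'status': 'OK', 'payload': [('answer',)]}).encode('utf-8')
print(body)  # b'{"status": "OK", "payload": [["answer"]]}'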
Example #8
def interact(model, params_names):
    if not request.is_json:
        return jsonify({"error": "request must contains json data"}), 400

    model_args = []

    data = request.get_json()
    for param_name in params_names:
        model_args.append(data.get(param_name))
    # For multi-input models, wrap the per-parameter values into a one-sample batch.
    if len(params_names) > 1:
        model_args = [model_args]

    prediction = model(model_args)
    result = jsonify_data(prediction[0])
    return jsonify(result), 200
Example #9
def interact(model: Chainer, params_names: List[str]) -> Tuple[Response, int]:
    if not request.is_json:
        log.error("request Content-Type header is not application/json")
        return jsonify({"error": "request Content-Type header is not application/json"}), 400

    model_args = []

    data = request.get_json()
    dialog_logger.log_in(data)
    for param_name in params_names:
        param_value = data.get(param_name)
        if param_value is None or (isinstance(param_value, list) and len(param_value) > 0):
            model_args.append(param_value)
        else:
            log.error(f"nonempty array expected but got '{param_name}'={repr(param_value)}")
            return jsonify({'error': f"nonempty array expected but got '{param_name}'={repr(param_value)}"}), 400

    lengths = {len(i) for i in model_args if i is not None}

    if not lengths:
        log.error('got empty request')
        return jsonify({'error': 'got empty request'}), 400
    elif len(lengths) > 1:
        log.error('got several different batch sizes')
        return jsonify({'error': 'got several different batch sizes'}), 400

    batch_size = next(iter(lengths))
    model_args = [arg or [None] * batch_size for arg in model_args]

    # Pad any model inputs that were not provided in the request.
    model_args += [[None] * batch_size for _ in range(len(model.in_x) - len(model_args))]

    prediction = model(*model_args)
    if len(model.out_params) == 1:
        prediction = [prediction]
    prediction = list(zip(*prediction))
    result = jsonify_data(prediction)
    dialog_logger.log_out(result)
    return jsonify(result), 200
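
The padding step can be illustrated standalone; a sketch for a hypothetical model with three inputs (`len(model.in_x) == 3`) where the request supplied only two:

in_x_count = 3   # stands in for len(model.in_x)
batch_size = 2
model_args = [['a1', 'a2'], ['b1', 'b2']]  # two supplied argument batches
model_args += [[None] * batch_size for _ in range(in_x_count - len(model_args))]
print(model_args)  # [['a1', 'a2'], ['b1', 'b2'], [None, None]]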
Example #10
    async def _interact(self, payloads: List[Dict]) -> List[Any]:
        """Infers model with the batch."""
        batch = defaultdict(list)

        # Merge all queued payloads into one flat batch per argument name.
        for payload in payloads:
            for arg_name in self._model_args_names:
                batch[arg_name].extend(payload.get(arg_name, [None]))

        dialog_logger.log_in(batch)

        prediction = self._model(*batch.values())
        if len(self._model.out_params) == 1:
            prediction = [prediction]
        prediction = list(zip(*prediction))
        result = jsonify_data(prediction)

        dialog_logger.log_out(result)

        return result
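
The merging loop concatenates queued requests into one flat batch per argument name, substituting `None` when a request omits an argument; a minimal sketch with a hypothetical single argument `'x'`:

from collections import defaultdict

payloads = [{'x': ['hello']}, {'x': ['world']}, {}]  # third request omits 'x'
model_args_names = ['x']  # stands in for self._model_args_names
batch = defaultdict(list)
for payload in payloads:
    for arg_name in model_args_names:
        batch[arg_name].extend(payload.get(arg_name, [None]))
print(dict(batch))  # {'x': ['hello', 'world', None]}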
Example #11
def interact(model: Chainer, params_names: List[str]) -> Tuple[Response, int]:
    if not request.is_json:
        log.error("request Content-Type header is not application/json")
        return jsonify({
            "error": "request Content-Type header is not application/json"
        }), 400

    model_args = []

    data = request.get_json()
    dialog_logger.log_in(data)
    for param_name in params_names:
        param_value = data.get(param_name)
        if param_value is None or (isinstance(param_value, list) and len(param_value) > 0):
            model_args.append(param_value)
        else:
            log.error(f"nonempty array expected but got '{param_name}'={repr(param_value)}")
            return jsonify({'error': f"nonempty array expected but got '{param_name}'={repr(param_value)}"}), 400

    lengths = {len(i) for i in model_args if i is not None}

    if not lengths:
        log.error('got empty request')
        return jsonify({'error': 'got empty request'}), 400
    elif len(lengths) > 1:
        log.error('got several different batch sizes')
        return jsonify({'error': 'got several different batch sizes'}), 400

    batch_size = next(iter(lengths))
    model_args = [arg or [None] * batch_size for arg in model_args]

    # Pad any model inputs that were not provided in the request.
    model_args += [[None] * batch_size for _ in range(len(model.in_x) - len(model_args))]

    prediction = model(*model_args)
    if len(model.out_params) == 1:
        prediction = [prediction]
    prediction = list(zip(*prediction))
    result = jsonify_data(prediction)
    dialog_logger.log_out(result)
    return jsonify(result), 200