Example #1
def test_endpoint_config():
    endpoint = EndpointConfig("https://abc.defg/",
                              params={"A": "B"},
                              headers={"X-Powered-By": "Rasa"},
                              basic_auth={
                                  "username": "user",
                                  "password": "pass"
                              },
                              token="mytoken",
                              token_name="letoken")

    httpretty.register_uri(httpretty.POST,
                           'https://abc.defg/test',
                           status=500,
                           body='')

    httpretty.enable()
    endpoint.request("post",
                     subpath="test",
                     content_type="application/text",
                     json={"c": "d"},
                     params={"P": "1"})
    httpretty.disable()

    r = httpretty.latest_requests[-1]

    assert json.loads(str(r.body.decode("utf-8"))) == {"c": "d"}
    assert r.headers.get("X-Powered-By") == "Rasa"
    assert r.headers.get("Authorization") == "Basic dXNlcjpwYXNz"
    assert r.querystring.get("A") == ["B"]
    assert r.querystring.get("P") == ["1"]
    assert r.querystring.get("letoken") == ["mytoken"]
Example #2
def test_custom_token_name():
    test_data = {
        'url': 'http://test',
        'token': 'token',
        'token_name': 'test_token'
    }

    actual = EndpointConfig.from_dict(test_data)

    assert actual.token_name == 'test_token'
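
Tying this back to the request behaviour asserted in Example #1, here is a minimal sketch of what the custom token name means for an outgoing request. The httpretty mock and the trailing slash on the URL are assumptions that mirror Example #1; only the dictionary fields come from the test above.

import httpretty

config = EndpointConfig.from_dict({
    'url': 'http://test/',
    'token': 'token',
    'token_name': 'test_token'
})

httpretty.register_uri(httpretty.GET, 'http://test/', body='')

httpretty.enable()
config.request("get")
httpretty.disable()

# As in Example #1, the token is appended to the query string under the
# configured token_name rather than a default parameter name.
request = httpretty.latest_requests[-1]
assert request.querystring.get("test_token") == ["token"]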
Example #3
def load_data_from_endpoint(data_endpoint: EndpointConfig,
                            language: Optional[Text] = 'en') -> 'TrainingData':
    """Load training data from a URL."""

    if not utils.is_url(data_endpoint.url):
        raise requests.exceptions.InvalidURL(data_endpoint.url)
    try:
        response = data_endpoint.request("get")
        response.raise_for_status()
        temp_data_file = utils.create_temporary_file(response.content,
                                                     mode="w+b")
        training_data = _load(temp_data_file, language)

        return training_data
    except Exception as e:
        logger.warning("Could not retrieve training data "
                       "from URL:\n{}".format(e))
Example #4
@responses.activate
def test_project_with_model_server(zipped_nlu_model):
    fingerprint = 'somehash'
    model_endpoint = EndpointConfig('http://server.com/models/nlu/tags/latest')

    # mock a response that returns a zipped model
    with io.open(zipped_nlu_model, 'rb') as f:
        responses.add(responses.GET,
                      model_endpoint.url,
                      headers={
                          "ETag": fingerprint,
                          "filename": "my_model_xyz.zip"
                      },
                      body=f.read(),
                      content_type='application/zip',
                      stream=True)
    project = load_from_server(model_server=model_endpoint)
    assert project.fingerprint == fingerprint
Example #5
def _pull_model_and_fingerprint(model_server: EndpointConfig,
                                model_directory: Text,
                                fingerprint: Optional[Text]
                                ) -> Tuple[Optional[Text], Optional[Text]]:
    """Queries the model server.

    Returns a tuple containing the response's <ETag> header, which holds
    the model hash, and the <filename> header, which holds the model name."""
    header = {"If-None-Match": fingerprint}
    try:
        logger.debug("Requesting model from server {}..."
                     "".format(model_server.url))
        response = model_server.request(method="GET",
                                        headers=header,
                                        timeout=DEFAULT_REQUEST_TIMEOUT)
    except RequestException as e:
        logger.warning("Tried to fetch model from server, but couldn't reach "
                       "server. We'll retry later... Error: {}."
                       "".format(e))
        return None, None

    if response.status_code == 204:
        logger.debug("Model server returned 204 status code, indicating "
                     "that no new model is available. "
                     "Current fingerprint: {}".format(fingerprint))
        return response.headers.get("ETag"), response.headers.get("filename")
    elif response.status_code == 404:
        logger.debug("Model server didn't find a model for our request. "
                     "Most likely no model has been trained for this "
                     "project and tag combination yet.")
        return None, None
    elif response.status_code != 200:
        logger.warning("Tried to fetch model from server, but server response "
                       "status code is {}. We'll retry later..."
                       "".format(response.status_code))
        return None, None

    zip_ref = zipfile.ZipFile(IOReader(response.content))
    zip_ref.extractall(model_directory)
    logger.debug("Unzipped model to {}"
                 "".format(os.path.abspath(model_directory)))

    # get the new fingerprint and filename
    return response.headers.get("ETag"), response.headers.get("filename")
Example #6
    interpreter = trainer.train(training_data, **kwargs)

    if path:
        persisted_path = trainer.persist(path, persistor, project,
                                         fixed_model_name)
    else:
        persisted_path = None

    return trainer, interpreter, persisted_path


if __name__ == '__main__':
    cmdline_args = create_argument_parser().parse_args()

    utils.configure_colored_logging(cmdline_args.loglevel)

    if cmdline_args.url:
        data_endpoint = EndpointConfig(cmdline_args.url)
    else:
        data_endpoint = read_endpoints(cmdline_args.endpoints).data

    train(cmdline_args.config,
          cmdline_args.data,
          cmdline_args.path,
          cmdline_args.project,
          cmdline_args.fixed_model_name,
          cmdline_args.storage,
          training_data_endpoint=data_endpoint,
          num_threads=cmdline_args.num_threads)
    logger.info("Finished training")