Example #1
    def test_model_predict(self):
        predictions = [{'output_1': [0.901], 'output_2': [0.997]}]
        builder = http.RequestMockBuilder({
            'ml.projects.predict':
            (None, self._make_response_body(predictions, successful=True))
        })
        resource = discovery.build('ml',
                                   'v1',
                                   http=http.HttpMock(
                                       self._discovery_testdata_dir,
                                       {'status': http_client.OK}),
                                   requestBuilder=builder)
        with mock.patch('googleapiclient.discovery.build') as response_mock:
            response_mock.side_effect = lambda service, version: resource
            inference_spec_type = model_spec_pb2.InferenceSpecType(
                ai_platform_prediction_model_spec=model_spec_pb2
                .AIPlatformPredictionModelSpec(
                    project_id='test-project',
                    model_name='test-model',
                ))

            prediction_log = prediction_log_pb2.PredictionLog()
            prediction_log.predict_log.response.outputs['output_1'].CopyFrom(
                tf.make_tensor_proto(values=[0.901],
                                     dtype=tf.double,
                                     shape=(1, 1)))
            prediction_log.predict_log.response.outputs['output_2'].CopyFrom(
                tf.make_tensor_proto(values=[0.997],
                                     dtype=tf.double,
                                     shape=(1, 1)))

            self._set_up_pipeline(inference_spec_type)
            assert_that(self.pcoll, equal_to([prediction_log]))
            self._run_inference_with_beam()
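
This test leans on a _make_response_body helper from its base class, which is not shown here. Below is a minimal sketch of what it plausibly does, assuming the Cloud AI Platform Prediction REST shape (a 'predictions' field on success, an 'error' field on failure); the name and behavior are inferred from the calls above, not copied from the source.

import json

def _make_response_body(content, successful):
    """Hypothetical stand-in for the base-class helper used above.

    Assumes the CAIP online-prediction JSON shape: 'predictions' on
    success and 'error' on failure.
    """
    if successful:
        body = {'predictions': content}
    else:
        body = {'error': content}
    return json.dumps(body)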
Example #2
    def test_exception_raised_when_response_body_contains_error_entry(self):
        error_msg = 'Base64 decode failed.'
        builder = http.RequestMockBuilder({
            'ml.projects.predict':
            (None, self._make_response_body(error_msg, successful=False))
        })
        resource = discovery.build('ml',
                                   'v1',
                                   http=http.HttpMock(
                                       self._discovery_testdata_dir,
                                       {'status': http_client.OK}),
                                   requestBuilder=builder)
        with mock.patch('googleapiclient.discovery.build') as response_mock:
            response_mock.side_effect = lambda service, version: resource
            inference_spec_type = model_spec_pb2.InferenceSpecType(
                ai_platform_prediction_model_spec=model_spec_pb2
                .AIPlatformPredictionModelSpec(
                    project_id='test-project',
                    model_name='test-model',
                ))

            try:
                self._set_up_pipeline(inference_spec_type)
                self._run_inference_with_beam()
            except ValueError as exc:
                actual_error_msg = str(exc)
                self.assertTrue(actual_error_msg.startswith(error_msg))
            else:
                self.fail('Test was expected to raise a ValueError.')
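
The try/except/else pattern above can be written more compactly with unittest's context-manager assertion. A sketch of the equivalent check, assuming the ValueError surfaces from _run_inference_with_beam as it does above (re.escape guards against regex metacharacters in the message):

import re

with self.assertRaisesRegex(ValueError, re.escape(error_msg)):
    self._set_up_pipeline(inference_spec_type)
    self._run_inference_with_beam()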
Example #3
    def test_build_http(self):
        """Verify build_http sets the user agent correctly."""
        http_mock = http.HttpMock()
        h = base.http_helpers.build_http(http=http_mock)

        _ = h.request('http://test.foo', 'GET')
        self.assertIn(forseti_security.__package_name__,
                      h.headers.get('user-agent'))
Example #4
def mock_http_response(response, status='200'):
    """Set the mock response for an HTTP request."""
    http_mock = http.HttpMock()
    http_mock.response_headers = {
        'status': status,
        'content-type': 'application/json',
    }
    http_mock.data = response
    _base_repository.LOCAL_THREAD.http = http_mock
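
A hypothetical usage of this helper, showing how a test might stage a canned error payload on the thread-local HTTP client before exercising a repository call; the payload shape is illustrative only, not taken from the source.

import json

# Illustrative only: queue up a 403 JSON body on the thread-local client.
mock_http_response(
    json.dumps({'error': {'code': 403, 'message': 'Forbidden'}}),
    status='403')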
Example #5
    def test_build_http_multiple(self):
        """Verify build_http sets the user agent only once."""
        http_mock = http.HttpMock()
        h = base.http_helpers.build_http(http=http_mock)
        for _ in range(5):
            h = base.http_helpers.build_http(http=h)

        _ = h.request('http://test.foo', 'GET')
        user_agent = h.headers.get('user-agent')
        forseti_agent_count = user_agent.count(
            forseti_security.__package_name__)
        self.assertEqual(1, forseti_agent_count)
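
The de-duplication behavior this test verifies can be summarized in a few lines. A hypothetical sketch (the helper name _append_user_agent is invented here; the real logic lives inside base.http_helpers.build_http):

def _append_user_agent(user_agent, package_name):
    # Append the package name only if it is not already present, so that
    # repeated build_http calls leave exactly one occurrence.
    if package_name not in user_agent:
        return (user_agent + ' ' + package_name).strip()
    return user_agent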
Example #6
    def test_set_user_agent(self):
        """Verify set user agent sets the user agent correctly."""
        credentials = self.get_test_credential()

        self.assertEqual('', credentials.user_agent)

        base._set_user_agent(credentials)

        self.assertIn(forseti_security.__package_name__,
                      credentials.user_agent)

        http_mock = http.HttpMock()
        credentials.authorize(http_mock)

        # The user-agent header is set during the request
        self.assertIsNone(http_mock.headers)

        _ = http_mock.request('http://test.foo', 'GET')
        self.assertIn(forseti_security.__package_name__,
                      http_mock.headers.get('user-agent'))
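
For context, a minimal sketch of what _set_user_agent plausibly does, assuming oauth2client-style credentials with a mutable user_agent attribute and that the forseti_security module is imported; this is inferred from the assertions above, not copied from the source.

def _set_user_agent(credentials):
    # Inferred behavior: append the Forseti package name to the existing
    # user agent, leaving any other content in place.
    agent = credentials.user_agent or ''
    if forseti_security.__package_name__ not in agent:
        credentials.user_agent = (
            agent + ' ' + forseti_security.__package_name__).strip()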
Example #7
  def test_can_format_requests(self):
    predictions = [{
        'output_1': [0.901],
        'output_2': [0.997]
    }] * len(self._predict_examples)
    builder = http.RequestMockBuilder({
        'ml.projects.predict':
            (None, self._make_response_body(predictions, successful=True))
    })
    resource = discovery.build(
        'ml',
        'v1',
        http=http.HttpMock(self._discovery_testdata_dir,
                           {'status': http_client.OK}),
        requestBuilder=builder)
    with mock.patch('googleapiclient.discovery.build') as response_mock:
      response_mock.side_effect = lambda service, version: resource
      inference_spec_type = model_spec_pb2.InferenceSpecType(
          ai_platform_prediction_model_spec=model_spec_pb2
          .AIPlatformPredictionModelSpec(
              project_id='test-project',
              model_name='test-model',
          ))

      example = text_format.Parse(
          """
        features {
          feature { key: "x_bytes" value { bytes_list { value: ["ASa8asdf"] }}}
          feature { key: "x" value { bytes_list { value: "JLK7ljk3" }}}
          feature { key: "y" value { int64_list { value: [1, 2] }}}
          feature { key: "z" value { float_list { value: [4.5, 5, 5.5] }}}
        }
        """, tf.train.Example())

      self.pipeline = self._make_beam_pipeline()
      self.pcoll = (
          self.pipeline
          | 'CreateExamples' >> beam.Create([example])
          | 'RunInference'
          >> run_inference.RunInferenceImpl(inference_spec_type))
      self._run_inference_with_beam()
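
What "format requests" means here: Cloud AI Platform online prediction accepts JSON instances, and serialized tf.Example bytes are sent base64-encoded under a 'b64' key. A sketch of that formatting (the function name is illustrative; the real formatting happens inside RunInferenceImpl's remote-prediction path):

import base64
import json

def _format_request(serialized_examples):
    # Illustrative: wrap each serialized tf.Example in the {'b64': ...}
    # envelope the CAIP online-prediction JSON API expects.
    instances = [{'b64': base64.b64encode(ex).decode('utf-8')}
                 for ex in serialized_examples]
    return json.dumps({'instances': instances})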
Example #8
  def test_model_predict(self, keyed_input: bool):
    predictions = [{
        'output_1': [0.901],
        'output_2': [0.997]
    }] * len(self._predict_examples)
    builder = http.RequestMockBuilder({
        'ml.projects.predict':
            (None, self._make_response_body(predictions, successful=True))
    })
    resource = discovery.build(
        'ml',
        'v1',
        http=http.HttpMock(self._discovery_testdata_dir,
                           {'status': http_client.OK}),
        requestBuilder=builder)
    with mock.patch('googleapiclient.discovery.build') as response_mock:
      response_mock.side_effect = lambda service, version: resource
      inference_spec_type = model_spec_pb2.InferenceSpecType(
          ai_platform_prediction_model_spec=model_spec_pb2
          .AIPlatformPredictionModelSpec(
              project_id='test-project',
              model_name='test-model',
          ))
      expected = []
      for example in self._predict_examples:
        prediction_log = prediction_log_pb2.PredictionLog()
        predict_log = prediction_log.predict_log
        input_tensor_proto = predict_log.request.inputs['inputs']
        input_tensor_proto.dtype = tf.string.as_datatype_enum
        input_tensor_proto.tensor_shape.dim.add().size = 1
        input_tensor_proto.string_val.append(example.SerializeToString())
        predict_log.response.outputs['output_1'].CopyFrom(
            tf.make_tensor_proto(values=[0.901], dtype=tf.double, shape=(1, 1)))
        predict_log.response.outputs['output_2'].CopyFrom(
            tf.make_tensor_proto(values=[0.997], dtype=tf.double, shape=(1, 1)))
        expected.append(prediction_log)

      self._set_up_pipeline(inference_spec_type, keyed_input)
      assert_that(self.pcoll, equal_to(expected))
      self._run_inference_with_beam()
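
The keyed_input argument suggests this test is parameterized to run both with bare examples and with (key, example) tuples. A sketch of how such a parameterization is commonly wired up with absl; the class name and parameter labels are assumptions, not taken from the source.

from absl.testing import parameterized

class ModelPredictTest(parameterized.TestCase):  # hypothetical class name

  @parameterized.named_parameters(
      ('unkeyed_input', False),
      ('keyed_input', True),
  )
  def test_model_predict(self, keyed_input: bool):
    ...  # body as in Example #8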