def testDo_InfraBlessingAsModel_FailIfNoWarmup(self):
  """Pushing an InfraBlessing that carries no model must raise."""
  infra_blessing = standard_artifacts.InfraBlessing()
  # Use int 1/0 for int custom properties, consistent with sibling tests
  # (was a bool `True` before; True == 1 so behavior is unchanged).
  infra_blessing.set_int_custom_property('blessed', 1)  # Blessed.
  # Blessed, but no warmed-up model was embedded into the blessing.
  infra_blessing.set_int_custom_property('has_model', 0)

  with self.assertRaisesRegex(RuntimeError,
                              'InfraBlessing does not contain a model'):
    self._executor.Do(
        {
            standard_component_specs.INFRA_BLESSING_KEY: [infra_blessing]
        }, self._output_dict, self._exec_properties)
def testDo_InfraNotBlessed_NotPushed(self):
  """Model is blessed but infra validation failed, so nothing is pushed."""
  self._model_blessing.set_int_custom_property('blessed', 1)  # Blessed.
  not_blessed = standard_artifacts.InfraBlessing()
  not_blessed.set_int_custom_property('blessed', 0)  # Not blessed.

  # Combine the failed InfraBlessing with the standard inputs.
  inputs = dict(self._input_dict)
  inputs[INFRA_BLESSING_KEY] = [not_blessed]

  self._executor.Do(inputs, self._output_dict, self._exec_properties)

  self.assertNotPushed()
def testDo_ModelBlessedAndInfraBlessed_Pushed(self):
  """Both model blessing and infra blessing passed, so the model is pushed."""
  self._model_blessing.set_int_custom_property('blessed', 1)  # Blessed.
  infra_blessing = standard_artifacts.InfraBlessing()
  infra_blessing.set_int_custom_property('blessed', 1)  # Blessed.

  # Use the INFRA_BLESSING_KEY constant, consistent with the sibling tests,
  # instead of the raw 'infra_blessing' string literal.
  input_dict = {INFRA_BLESSING_KEY: [infra_blessing]}
  input_dict.update(self._input_dict)

  # Run executor.
  self._executor.Do(input_dict, self._output_dict, self._exec_properties)

  # Check model is pushed.
  self.assertPushed()
def testDo_NoModelBlessing_InfraNotBlessed_NotPushed(self):
  """A failed InfraBlessing alone (no ModelBlessing) must block the push."""
  not_blessed = standard_artifacts.InfraBlessing()
  not_blessed.set_int_custom_property('blessed', 0)  # Not blessed.

  # Only the model and the failed infra blessing; no ModelBlessing input.
  inputs = {
      MODEL_KEY: self._input_dict[MODEL_KEY],
      INFRA_BLESSING_KEY: [not_blessed],
  }

  self._executor.Do(inputs, self._output_dict, self._exec_properties)

  self.assertNotPushed()
def testDo_NoModelBlessing_InfraBlessed_Pushed(self):
  """A successful InfraBlessing alone (no ModelBlessing) allows the push."""
  blessed = standard_artifacts.InfraBlessing()
  blessed.set_int_custom_property('blessed', 1)  # Blessed.

  # Only the model and the successful infra blessing; no ModelBlessing.
  inputs = {
      standard_component_specs.MODEL_KEY:
          self._input_dict[standard_component_specs.MODEL_KEY],
      standard_component_specs.INFRA_BLESSING_KEY: [blessed],
  }

  self._executor.Do(inputs, self._output_dict, self._exec_properties)

  self.assertPushed()
def __init__(
    self,
    model: types.Channel,
    serving_spec: infra_validator_pb2.ServingSpec,
    examples: Optional[types.Channel] = None,
    blessing: Optional[types.Channel] = None,
    request_spec: Optional[infra_validator_pb2.RequestSpec] = None,
    validation_spec: Optional[
        infra_validator_pb2.ValidationSpec] = None,
    instance_name: Optional[Text] = None,
    enable_cache: Optional[bool] = None):
  """Construct a InfraValidator component.

  Args:
    model: A `Channel` of `ModelExportPath` type, usually produced by the
      [Trainer](https://www.tensorflow.org/tfx/guide/trainer) component.
      _required_
    serving_spec: A `ServingSpec` configuration describing the serving
      binary and the test platform used to launch the model server for
      validation. _required_
    examples: A `Channel` of `ExamplesPath` type, usually produced by the
      [ExampleGen](https://www.tensorflow.org/tfx/guide/examplegen)
      component. If not specified, InfraValidator does not issue requests
      for validation.
    blessing: Output `Channel` of `InfraBlessingPath` that contains the
      validation result.
    request_spec: Optional `RequestSpec` configuration about making
      requests from the `examples` input. If not specified, InfraValidator
      does not issue requests for validation.
    validation_spec: Optional `ValidationSpec` configuration.
    instance_name: Optional name assigned to this specific instance of
      InfraValidator. Required only if multiple InfraValidator components
      are declared in the same pipeline.
    enable_cache: Optional boolean to indicate if cache is enabled for the
      InfraValidator component. If not specified, defaults to the value
      specified for pipeline's enable_cache parameter.
  """
  # Supply a default blessing channel when the caller did not provide one.
  if not blessing:
    blessing = types.Channel(
        type=standard_artifacts.InfraBlessing,
        artifacts=[standard_artifacts.InfraBlessing()])
  spec = standard_component_specs.InfraValidatorSpec(
      model=model,
      examples=examples,
      blessing=blessing,
      serving_spec=serving_spec,
      validation_spec=validation_spec,
      request_spec=request_spec)
  super(InfraValidator, self).__init__(
      spec=spec,
      instance_name=instance_name,
      enable_cache=enable_cache)
def testDo_InfraNotBlessed_NotPushed(self):
  """Model is blessed but infra validation failed, so nothing is pushed."""
  self._model_blessing.set_int_custom_property('blessed', 1)  # Blessed.
  not_blessed = standard_artifacts.InfraBlessing()
  not_blessed.set_int_custom_property('blessed', 0)  # Not blessed.

  # Combine the failed InfraBlessing with the standard inputs.
  inputs = dict(self._input_dict)
  inputs['infra_blessing'] = [not_blessed]

  self._executor.Do(inputs, self._output_dict, self._exec_properties)

  # Neither push destination received a model, and the push artifact
  # records pushed == 0.
  self.assertDirectoryEmpty(self._serving_model_dir)
  self.assertDirectoryEmpty(self._model_push.uri)
  self.assertEqual(
      0,
      self._model_push.mlmd_artifact.custom_properties['pushed'].int_value)
def testDo_InfraBlessingAsModel(self):
  """A blessed InfraBlessing with an embedded model is pushed directly."""
  infra_blessing = standard_artifacts.InfraBlessing()
  infra_blessing.uri = os.path.join(self._output_data_dir, 'infra_blessing')
  # Use int 1/0 for int custom properties, consistent with sibling tests
  # (was a bool `True` before; True == 1 so behavior is unchanged).
  infra_blessing.set_int_custom_property('blessed', 1)  # Blessed.
  infra_blessing.set_int_custom_property('has_model', 1)

  # Create a dummy warmed-up model file inside the blessing artifact.
  blessed_model_path = path_utils.stamped_model_path(infra_blessing.uri)
  fileio.makedirs(blessed_model_path)
  io_utils.write_string_file(
      os.path.join(blessed_model_path, 'my-model'), '')

  self._executor.Do({INFRA_BLESSING_KEY: [infra_blessing]},
                    self._output_dict, self._exec_properties)

  self.assertPushed()
  # The model file embedded in the blessing must reach the push destination.
  self.assertTrue(
      fileio.exists(os.path.join(self._model_push.uri, 'my-model')))
def setUp(self):
  """Set up mocks, fixture directories, and executor inputs/outputs."""
  super(ExecutorTest, self).setUp()

  # Mock out request building; tests inspect self.build_requests_mock.
  patcher = mock.patch.object(request_builder, 'build_requests')
  self.build_requests_mock = patcher.start()
  self.addCleanup(patcher.stop)

  # Resolve fixture (read-only) and scratch (per-test output) directories.
  testdata_dir = os.path.join(
      os.path.dirname(os.path.dirname(__file__)), 'testdata')
  scratch_root = os.environ.get('TEST_UNDECLARED_OUTPUTS_DIR',
                                self.get_temp_dir())
  scratch_dir = os.path.join(scratch_root, self._testMethodName)

  # Input artifacts: a trained model and eval examples from testdata.
  self._model = standard_artifacts.Model()
  self._model.uri = os.path.join(testdata_dir, 'trainer', 'current')
  self._model_path = path_utils.serving_model_path(self._model.uri)
  examples = standard_artifacts.Examples()
  examples.uri = os.path.join(testdata_dir, 'transform',
                              'transformed_examples', 'eval')
  examples.split_names = artifact_utils.encode_split_names(['eval'])
  self._input_dict = {
      MODEL_KEY: [self._model],
      EXAMPLES_KEY: [examples],
  }

  # Output artifact: the InfraBlessing written by the executor.
  self._blessing = standard_artifacts.InfraBlessing()
  self._blessing.uri = os.path.join(scratch_dir, 'blessing')
  self._output_dict = {BLESSING_KEY: [self._blessing]}

  # Executor context with a per-test temp dir.
  self._context = executor.Executor.Context(
      tmp_dir=os.path.join(scratch_dir, '.temp'), unique_id='1')

  # Specs serialized into exec_properties below.
  self._serving_spec = _make_serving_spec({
      'tensorflow_serving': {
          'tags': ['1.15.0']
      },
      'local_docker': {},
      'model_name': 'chicago-taxi',
  })
  self._serving_binary = serving_bins.parse_serving_binaries(
      self._serving_spec)[0]
  self._validation_spec = _make_validation_spec({
      'max_loading_time_seconds': 10,
      'num_tries': 3
  })
  self._request_spec = _make_request_spec({
      'tensorflow_serving': {
          'signature_names': ['serving_default'],
      },
      'num_examples': 1
  })
  self._exec_properties = {
      SERVING_SPEC_KEY: proto_utils.proto_to_json(self._serving_spec),
      VALIDATION_SPEC_KEY: proto_utils.proto_to_json(self._validation_spec),
      REQUEST_SPEC_KEY: proto_utils.proto_to_json(self._request_spec),
  }
def setUp(self):
  """Set up runner/request-builder mocks and executor inputs/outputs."""
  super(ExecutorTest, self).setUp()

  # Mock the local-docker model server runner so no container is launched.
  runner_patcher = mock.patch('tfx.components.infra_validator'
                              '.model_server_runners.local_docker_runner'
                              '.LocalDockerModelServerRunner')
  self.model_server = runner_patcher.start().return_value
  self.addCleanup(runner_patcher.stop)

  # Mock out request building.
  build_request_patcher = mock.patch(
      'tfx.components.infra_validator.request_builder'
      '.build_requests')
  self.build_requests_mock = build_request_patcher.start()
  self.addCleanup(build_request_patcher.stop)

  # The runner exposes a client; replace it with a MagicMock.
  self.model_server.client = mock.MagicMock()

  # Resolve fixture (read-only) and scratch (per-test output) directories.
  testdata_dir = os.path.join(
      os.path.dirname(os.path.dirname(__file__)), 'testdata')
  scratch_root = os.environ.get('TEST_UNDECLARED_OUTPUTS_DIR',
                                self.get_temp_dir())
  scratch_dir = os.path.join(scratch_root, self._testMethodName)

  # Input artifacts: a trained model and eval examples from testdata.
  model = standard_artifacts.Model()
  model.uri = os.path.join(testdata_dir, 'trainer', 'current')
  examples = standard_artifacts.Examples()
  examples.uri = os.path.join(testdata_dir, 'transform',
                              'transformed_examples', 'eval')
  examples.split_names = artifact_utils.encode_split_names(['eval'])
  self.input_dict = {
      'model': [model],
      'examples': [examples],
  }

  # Output artifact: the InfraBlessing written by the executor.
  self.blessing = standard_artifacts.InfraBlessing()
  self.blessing.uri = os.path.join(scratch_dir, 'blessing')
  self.output_dict = {'blessing': [self.blessing]}

  # Executor context with a per-test temp dir.
  self.context = executor.Executor.Context(
      tmp_dir=os.path.join(scratch_dir, '.temp'), unique_id='1')

  # Exec properties carry JSON-serialized spec protos.
  self.exec_properties = {
      'serving_spec':
          json.dumps({
              'tensorflow_serving': {
                  'tags': ['1.15.0']
              },
              'local_docker': {}
          }),
      'validation_spec':
          json.dumps({'max_loading_time_seconds': 10}),
      'request_spec':
          json.dumps({
              'tensorflow_serving': {
                  'rpc_kind': 'CLASSIFY'
              },
              'max_examples': 10
          })
  }