def __init__(self,
             examples: types.Channel = None,
             model: Optional[types.Channel] = None,
             model_blessing: Optional[types.Channel] = None,
             data_spec: Optional[Union[bulk_inferrer_pb2.DataSpec,
                                       Dict[Text, Any]]] = None,
             model_spec: Optional[Union[bulk_inferrer_pb2.ModelSpec,
                                        Dict[Text, Any]]] = None,
             output_example_spec: Optional[Union[
                 bulk_inferrer_pb2.OutputExampleSpec, Dict[Text, Any]]] = None):
  """Construct a BulkInferrer component.

  Args:
    examples: A Channel of type `standard_artifacts.Examples`, usually
      produced by an ExampleGen component. _required_
    model: A Channel of type `standard_artifacts.Model`, usually produced by
      a Trainer component.
    model_blessing: A Channel of type `standard_artifacts.ModelBlessing`,
      usually produced by a ModelValidator component.
    data_spec: bulk_inferrer_pb2.DataSpec instance that describes data
      selection. If any field is provided as a RuntimeParameter, data_spec
      should be constructed as a dict with the same field names as the
      DataSpec proto message.
    model_spec: bulk_inferrer_pb2.ModelSpec instance that describes the model
      specification. If any field is provided as a RuntimeParameter,
      model_spec should be constructed as a dict with the same field names as
      the ModelSpec proto message.
    output_example_spec: bulk_inferrer_pb2.OutputExampleSpec instance;
      specify it if you want BulkInferrer to output examples instead of
      inference results. If any field is provided as a RuntimeParameter,
      output_example_spec should be constructed as a dict with the same field
      names as the OutputExampleSpec proto message.
  """
  if output_example_spec:
    output_examples = types.Channel(type=standard_artifacts.Examples)
    inference_result = None
  else:
    inference_result = types.Channel(
        type=standard_artifacts.InferenceResult)
    output_examples = None
  spec = BulkInferrerSpec(
      examples=examples,
      model=model,
      model_blessing=model_blessing,
      data_spec=data_spec or bulk_inferrer_pb2.DataSpec(),
      model_spec=model_spec or bulk_inferrer_pb2.ModelSpec(),
      output_example_spec=output_example_spec,
      inference_result=inference_result,
      output_examples=output_examples)
  super(BulkInferrer, self).__init__(spec=spec)
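# Usage sketch (added for illustration; not part of the original source).
# Assumes upstream `example_gen` and `trainer` components already exist in the
# pipeline; the output channel keys are inferred from the BulkInferrerSpec
# parameter names above and should be checked against your TFX version.
from tfx.proto import bulk_inferrer_pb2

# Default behaviour: an 'inference_result' output channel is created.
bulk_inferrer = BulkInferrer(
    examples=example_gen.outputs['examples'],
    model=trainer.outputs['model'])

# With output_example_spec set, the component emits an 'output_examples'
# channel of Examples annotated with predictions instead of InferenceResult.
bulk_inferrer_with_examples = BulkInferrer(
    examples=example_gen.outputs['examples'],
    model=trainer.outputs['model'],
    output_example_spec=bulk_inferrer_pb2.OutputExampleSpec())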
def __init__(self,
             examples: types.Channel = None,
             model: Optional[types.Channel] = None,
             model_blessing: Optional[types.Channel] = None,
             data_spec: Optional[Union[bulk_inferrer_pb2.DataSpec,
                                       Dict[Text, Any]]] = None,
             model_spec: Optional[Union[bulk_inferrer_pb2.ModelSpec,
                                        Dict[Text, Any]]] = None,
             inference_result: Optional[types.Channel] = None,
             instance_name: Optional[Text] = None,
             enable_cache: Optional[bool] = None):
  """Construct a BulkInferrer component.

  Args:
    examples: A Channel of type `standard_artifacts.Examples`, usually
      produced by an ExampleGen component. _required_
    model: A Channel of type `standard_artifacts.Model`, usually produced by
      a Trainer component.
    model_blessing: A Channel of type `standard_artifacts.ModelBlessing`,
      usually produced by a ModelValidator component.
    data_spec: bulk_inferrer_pb2.DataSpec instance that describes data
      selection. If any field is provided as a RuntimeParameter, data_spec
      should be constructed as a dict with the same field names as the
      DataSpec proto message.
    model_spec: bulk_inferrer_pb2.ModelSpec instance that describes the model
      specification. If any field is provided as a RuntimeParameter,
      model_spec should be constructed as a dict with the same field names as
      the ModelSpec proto message.
    inference_result: Channel of type `standard_artifacts.InferenceResult`
      to store the inference results.
    instance_name: Optional name assigned to this specific instance of
      BulkInferrer. Required only if multiple BulkInferrer components are
      declared in the same pipeline.
    enable_cache: Optional boolean to indicate whether caching is enabled for
      the BulkInferrer component. If not specified, defaults to the value of
      the pipeline's enable_cache parameter.
  """
  inference_result = inference_result or types.Channel(
      type=standard_artifacts.InferenceResult,
      artifacts=[standard_artifacts.InferenceResult()])
  spec = BulkInferrerSpec(
      examples=examples,
      model=model,
      model_blessing=model_blessing,
      data_spec=data_spec or bulk_inferrer_pb2.DataSpec(),
      model_spec=model_spec or bulk_inferrer_pb2.ModelSpec(),
      inference_result=inference_result)
  super(BulkInferrer, self).__init__(
      spec=spec, instance_name=instance_name, enable_cache=enable_cache)
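# Usage sketch (added for illustration; not part of the original source).
# Shows per-component cache control for this version of the constructor. The
# upstream `example_gen` and `trainer` components, and the 'example_splits'
# field of DataSpec, are assumptions; verify the field name against
# bulk_inferrer_pb2.DataSpec in your TFX version.
from tfx.proto import bulk_inferrer_pb2

bulk_inferrer = BulkInferrer(
    examples=example_gen.outputs['examples'],
    model=trainer.outputs['model'],
    data_spec=bulk_inferrer_pb2.DataSpec(example_splits=['unlabelled']),
    enable_cache=False,  # overrides the pipeline-level enable_cache value
    instance_name='bulk_inferrer_unlabelled')

# Downstream components consume the results via the inference_result channel.
results = bulk_inferrer.outputs['inference_result']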
def __init__(self,
             examples: types.Channel = None,
             model_export: Optional[types.Channel] = None,
             model_blessing: Optional[types.Channel] = None,
             model_push: Optional[types.Channel] = None,
             data_spec: Optional[bulk_inferrer_pb2.DataSpec] = None,
             model_spec: Optional[bulk_inferrer_pb2.ModelSpec] = None,
             output: Optional[types.Channel] = None,
             instance_name: Optional[Text] = None):
  """Construct a BulkInferrer component.

  Args:
    examples: A Channel of 'ExamplesPath' type, usually produced by an
      ExampleGen component. _required_
    model_export: A Channel of 'ModelExportPath' type, usually produced by a
      Trainer component.
    model_blessing: A Channel of 'ModelBlessingPath' type, usually produced
      by a ModelValidator component.
    model_push: A Channel of 'PushedModel' type, usually produced by a Pusher
      component.
    data_spec: bulk_inferrer_pb2.DataSpec instance that describes data
      selection.
    model_spec: bulk_inferrer_pb2.ModelSpec instance that describes the model
      specification.
    output: Channel of `InferenceResult` to store the inference results.
    instance_name: Optional name assigned to this specific instance of
      BulkInferrer. Required only if multiple BulkInferrer components are
      declared in the same pipeline.
  """
  output = output or types.Channel(
      type=standard_artifacts.InferenceResult,
      artifacts=[standard_artifacts.InferenceResult()])
  spec = BulkInferrerSpec(
      examples=examples,
      model_export=model_export,
      model_blessing=model_blessing,
      model_push=model_push,
      data_spec=data_spec or bulk_inferrer_pb2.DataSpec(),
      model_spec=model_spec or bulk_inferrer_pb2.ModelSpec(),
      output=output)
  super(BulkInferrer, self).__init__(spec=spec, instance_name=instance_name)
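# Usage sketch (added for illustration; not part of the original source).
# This older API surface uses the legacy model_export / model_blessing /
# model_push channel names. The upstream `example_gen`, `trainer`, and
# `model_validator` components and their output keys ('output', 'blessing')
# are assumptions that depend on the TFX release this snapshot comes from.
bulk_inferrer = BulkInferrer(
    examples=example_gen.outputs['examples'],
    model_export=trainer.outputs['output'],
    model_blessing=model_validator.outputs['blessing'])

# Inference results are read from the 'output' channel declared above.
results = bulk_inferrer.outputs['output']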
def __init__(self,
             examples: types.Channel = None,
             model: Optional[types.Channel] = None,
             model_blessing: Optional[types.Channel] = None,
             data_spec: Optional[Union[bulk_inferrer_pb2.DataSpec,
                                       Dict[Text, Any]]] = None,
             model_spec: Optional[Union[bulk_inferrer_pb2.ModelSpec,
                                        Dict[Text, Any]]] = None,
             output_example_spec: Optional[Union[
                 bulk_inferrer_pb2.OutputExampleSpec, Dict[Text, Any]]] = None,
             inference_result: Optional[types.Channel] = None,
             output_examples: Optional[types.Channel] = None,
             instance_name: Optional[Text] = None):
  """Construct a BulkInferrer component.

  Args:
    examples: A Channel of type `standard_artifacts.Examples`, usually
      produced by an ExampleGen component. _required_
    model: A Channel of type `standard_artifacts.Model`, usually produced by
      a Trainer component.
    model_blessing: A Channel of type `standard_artifacts.ModelBlessing`,
      usually produced by a ModelValidator component.
    data_spec: bulk_inferrer_pb2.DataSpec instance that describes data
      selection. If any field is provided as a RuntimeParameter, data_spec
      should be constructed as a dict with the same field names as the
      DataSpec proto message.
    model_spec: bulk_inferrer_pb2.ModelSpec instance that describes the model
      specification. If any field is provided as a RuntimeParameter,
      model_spec should be constructed as a dict with the same field names as
      the ModelSpec proto message.
    output_example_spec: bulk_inferrer_pb2.OutputExampleSpec instance;
      specify it if you want BulkInferrer to output examples instead of
      inference results. If any field is provided as a RuntimeParameter,
      output_example_spec should be constructed as a dict with the same field
      names as the OutputExampleSpec proto message.
    inference_result: Channel of type `standard_artifacts.InferenceResult`
      to store the inference results; must not be specified when
      output_example_spec is set.
    output_examples: Channel of type `standard_artifacts.Examples` to store
      the output examples; must not be specified when output_example_spec is
      unset. See output_example_spec for details.
    instance_name: Optional name assigned to this specific instance of
      BulkInferrer. Required only if multiple BulkInferrer components are
      declared in the same pipeline.

  Raises:
    ValueError: If inference_result is specified while output_example_spec is
      set, or output_examples is specified while output_example_spec is unset.
  """
  if output_example_spec:
    if inference_result:
      raise ValueError(
          'Must not specify inference_result when output_example_spec is set.'
      )
    output_examples = output_examples or types.Channel(
        type=standard_artifacts.Examples)
  else:
    if output_examples:
      raise ValueError(
          'Must not specify output_examples when output_example_spec is unset.'
      )
    inference_result = inference_result or types.Channel(
        type=standard_artifacts.InferenceResult)
  spec = BulkInferrerSpec(
      examples=examples,
      model=model,
      model_blessing=model_blessing,
      data_spec=data_spec or bulk_inferrer_pb2.DataSpec(),
      model_spec=model_spec or bulk_inferrer_pb2.ModelSpec(),
      output_example_spec=output_example_spec,
      inference_result=inference_result,
      output_examples=output_examples)
  super(BulkInferrer, self).__init__(spec=spec, instance_name=instance_name)
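# Usage sketch (added for illustration; not part of the original source).
# Demonstrates the mutually exclusive output channels enforced above; the
# upstream `example_gen` and `trainer` components are assumptions.
from tfx.proto import bulk_inferrer_pb2

# Valid: no output_example_spec, so an inference_result channel is created.
plain_inferrer = BulkInferrer(
    examples=example_gen.outputs['examples'],
    model=trainer.outputs['model'],
    instance_name='plain_inferrer')

# Valid: output_example_spec is set, so an output_examples channel is created.
example_inferrer = BulkInferrer(
    examples=example_gen.outputs['examples'],
    model=trainer.outputs['model'],
    output_example_spec=bulk_inferrer_pb2.OutputExampleSpec(),
    instance_name='example_inferrer')

# Invalid: passing inference_result together with output_example_spec (or
# output_examples without it) raises the ValueError documented above.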