class HelloComponentSpec(types.ComponentSpec):
  """ComponentSpec for Custom TFX Hello World Component."""

  # Execution parameters supplied when an instance of the component is
  # constructed.
  PARAMETERS = {
      'name': ExecutionParameter(type=Text),
  }
  # Input artifact channels; resolved to concrete artifacts (with URIs) at
  # execution time.
  INPUTS = {
      'input_data': ChannelParameter(type=standard_artifacts.Examples),
  }
  # Output artifact channels populated by the component's executor.
  OUTPUTS = {
      'output_data': ChannelParameter(type=standard_artifacts.Examples),
  }
class TransformSpec(ComponentSpec):
  """Transform component spec."""

  # Runtime configuration: exactly how the preprocessing function is located
  # (module file / module path / fully-qualified fn name) is decided by the
  # executor; all of these are optional at the spec level.
  PARAMETERS = {
      MODULE_FILE_KEY: ExecutionParameter(type=str, optional=True),
      MODULE_PATH_KEY: ExecutionParameter(type=str, optional=True),
      PREPROCESSING_FN_KEY: ExecutionParameter(type=str, optional=True),
      FORCE_TF_COMPAT_V1_KEY: ExecutionParameter(type=int, optional=True),
      CUSTOM_CONFIG_KEY: ExecutionParameter(type=str, optional=True),
      SPLITS_CONFIG_KEY: ExecutionParameter(
          type=transform_pb2.SplitsConfig, optional=True),
      DISABLE_STATISTICS_KEY: ExecutionParameter(type=int, optional=True),
  }
  # Required examples and schema; the analyzer cache is an optional
  # optimization input.
  INPUTS = {
      EXAMPLES_KEY: ChannelParameter(type=standard_artifacts.Examples),
      SCHEMA_KEY: ChannelParameter(type=standard_artifacts.Schema),
      ANALYZER_CACHE_KEY: ChannelParameter(
          type=standard_artifacts.TransformCache, optional=True),
  }
  # The transform graph is always produced; every other output is optional
  # and depends on execution-time configuration.
  OUTPUTS = {
      TRANSFORM_GRAPH_KEY: ChannelParameter(
          type=standard_artifacts.TransformGraph),
      TRANSFORMED_EXAMPLES_KEY: ChannelParameter(
          type=standard_artifacts.Examples, optional=True),
      UPDATED_ANALYZER_CACHE_KEY: ChannelParameter(
          type=standard_artifacts.TransformCache, optional=True),
      PRE_TRANSFORM_SCHEMA_KEY: ChannelParameter(
          type=standard_artifacts.Schema, optional=True),
      PRE_TRANSFORM_STATS_KEY: ChannelParameter(
          type=standard_artifacts.ExampleStatistics, optional=True),
      POST_TRANSFORM_SCHEMA_KEY: ChannelParameter(
          type=standard_artifacts.Schema, optional=True),
      POST_TRANSFORM_STATS_KEY: ChannelParameter(
          type=standard_artifacts.ExampleStatistics, optional=True),
      POST_TRANSFORM_ANOMALIES_KEY: ChannelParameter(
          type=standard_artifacts.ExampleAnomalies, optional=True)
  }
class ExampleValidatorSpec(ComponentSpec):
  """ExampleValidator component spec."""

  # Optional comma-separated list of splits to skip during validation.
  PARAMETERS = {
      EXCLUDE_SPLITS_KEY: ExecutionParameter(type=(str, Text), optional=True),
  }
  # Validation compares computed statistics against the curated schema.
  INPUTS = {
      STATISTICS_KEY:
          ChannelParameter(type=standard_artifacts.ExampleStatistics),
      SCHEMA_KEY:
          ChannelParameter(type=standard_artifacts.Schema),
  }
  # Anomalies found (if any) are emitted on this channel.
  OUTPUTS = {
      ANOMALIES_KEY: ChannelParameter(type=standard_artifacts.ExampleAnomalies),
  }
class MetaFeatureGenSpec(ComponentSpec):
  """MetaFeatureGenSpec component spec."""

  # Free-form configuration dict forwarded to the executor.
  PARAMETERS = {
      'custom_config': ExecutionParameter(type=Dict[str, Any], optional=True),
  }
  # Statistics are required; raw examples are an optional extra signal.
  INPUTS = {
      executor.STATISTICS_KEY:
          ChannelParameter(type=standard_artifacts.ExampleStatistics),
      executor.EXAMPLES_KEY:
          ChannelParameter(type=standard_artifacts.Examples, optional=True),
  }
  OUTPUTS = {
      'metafeatures': ChannelParameter(type=artifacts.MetaFeatures),
  }
class ExportToTFServingSpec(types.ComponentSpec):
  """ComponentSpec for exporting a trained model to TF Serving layout."""

  # Training configuration dict passed through to the executor.
  PARAMETERS = {
      'train_config': ExecutionParameter(type=Dict),
  }
  # External artifacts pointing at the checkpoint directory and model path.
  INPUTS = {
      'checkpoint_dir':
          ChannelParameter(type=standard_artifacts.ExternalArtifact),
      'model_path':
          ChannelParameter(type=standard_artifacts.ExternalArtifact),
  }
  # Directory the exported serving model is written to.
  OUTPUTS = {
      'export_dir': ChannelParameter(type=standard_artifacts.ExternalArtifact),
  }
class TCGAPreprocessingSpec(ComponentSpec):
  """TFX Custom TCGAPreprocessing component spec."""

  PARAMETERS = {
      # Custom query/schema configuration for the TCGA preprocessing step.
      'query': ExecutionParameter(type=Text),
      'output_schema': ExecutionParameter(type=Any),
      'table_name': ExecutionParameter(type=Text),
      'use_bigquery_source': ExecutionParameter(type=Any),
      # Default arguments expected by TFX example-gen style components.
      'input_config': ExecutionParameter(type=example_gen_pb2.Input),
      'output_config': ExecutionParameter(type=example_gen_pb2.Output),
      'input_base': ExecutionParameter(type=(str, Text))
  }
  # This component is a source: it consumes nothing from upstream channels.
  INPUTS = {}
  OUTPUTS = {'examples': ChannelParameter(type=standard_artifacts.Examples)}
class InfraValidatorSpec(ComponentSpec):
  """InfraValidator component spec."""

  # Proto describing the serving binary/platform to validate against.
  PARAMETERS = {
      'serving_spec': ExecutionParameter(type=infra_validator_pb2.ServingSpec)
  }
  # The model under validation; examples are optional (used for request
  # traffic when provided).
  INPUTS = {
      'model': ChannelParameter(type=standard_artifacts.Model),
      'examples': ChannelParameter(
          type=standard_artifacts.Examples, optional=True),
  }
  # Infra blessing result consumed by downstream components (e.g. Pusher).
  OUTPUTS = {
      'blessing': ChannelParameter(type=standard_artifacts.InfraBlessing),
  }
class MongoExportSpec(types.ComponentSpec):
  """ComponentSpec for exporting MongoDB collections to text files."""

  # Connection settings plus the collections to export and an end token.
  PARAMETERS = {
      'ip': ExecutionParameter(type=Text),
      'port': ExecutionParameter(type=Text),
      'username': ExecutionParameter(type=Text),
      'password': ExecutionParameter(type=Text),
      'dbname': ExecutionParameter(type=Text),
      'colnames': ExecutionParameter(type=List),
      'end_token': ExecutionParameter(type=Text),
  }
  # Source component: no upstream artifact inputs.
  INPUTS = {}
  OUTPUTS = {
      'merged_text_dir':
          ChannelParameter(type=standard_artifacts.ExternalArtifact),
  }
class SchemaGenSpec(ComponentSpec):
  """SchemaGen component spec."""

  PARAMETERS = {'infer_feature_shape': ExecutionParameter(type=bool)}
  INPUTS = {
      # TODO(b/139281215): this will be renamed to 'statistics' in the future.
      'stats': ChannelParameter(type=standard_artifacts.ExampleStatistics),
  }
  OUTPUTS = {
      'output': ChannelParameter(type=standard_artifacts.Schema),
  }
  # TODO(b/139281215): these input / output names will be renamed in the
  # future. These compatibility aliases are provided for forwards
  # compatibility.
  _INPUT_COMPATIBILITY_ALIASES = {
      'statistics': 'stats',
  }
  _OUTPUT_COMPATIBILITY_ALIASES = {
      'schema': 'output',
  }
class EvaluatorSpec(ComponentSpec):
  """Evaluator component spec."""

  # Slicing configuration controlling how metrics are broken down.
  PARAMETERS = {
      'feature_slicing_spec':
          ExecutionParameter(type=evaluator_pb2.FeatureSlicingSpec),
  }
  INPUTS = {
      'examples': ChannelParameter(type=standard_artifacts.Examples),
      # TODO(b/139281215): this will be renamed to 'model' in the future.
      'model_exports': ChannelParameter(type=standard_artifacts.Model),
  }
  OUTPUTS = {
      'output': ChannelParameter(type=standard_artifacts.ModelEvaluation),
  }
  # TODO(b/139281215): these input names will be renamed in the future.
  # These compatibility aliases are provided for forwards compatibility.
  _INPUT_COMPATIBILITY_ALIASES = {
      'model': 'model_exports',
  }
class OldNewsImportSpec(types.ComponentSpec):
  """ComponentSpec for importing archived news data into MongoDB."""

  # MongoDB connection settings and the directory holding the backup.
  PARAMETERS = {
      'ip': ExecutionParameter(type=Text),
      'port': ExecutionParameter(type=Text),
      'username': ExecutionParameter(type=Text),
      'password': ExecutionParameter(type=Text),
      'dbname': ExecutionParameter(type=Text),
      'backup_dir': ExecutionParameter(type=Text),
  }
  # Purely side-effecting component: no artifact inputs or outputs.
  INPUTS = {}
  OUTPUTS = {}
class MongoImportSpec(types.ComponentSpec):
  """ComponentSpec for importing an RSS feed into a MongoDB collection."""

  # MongoDB connection settings and the target collection name.
  PARAMETERS = {
      'ip': ExecutionParameter(type=Text),
      'port': ExecutionParameter(type=Text),
      'username': ExecutionParameter(type=Text),
      'password': ExecutionParameter(type=Text),
      'dbname': ExecutionParameter(type=Text),
      'colname': ExecutionParameter(type=Text),
  }
  # External artifact pointing at the RSS feed to ingest.
  INPUTS = {
      'rss_feed': ChannelParameter(type=standard_artifacts.ExternalArtifact),
  }
  # Writes into MongoDB directly; no downstream artifact outputs.
  OUTPUTS = {}
class SchemaGenSpec(ComponentSpec):
  """SchemaGen component spec."""

  PARAMETERS = {
      'infer_feature_shape': ExecutionParameter(type=bool, optional=True)
  }
  INPUTS = {
      'statistics':
          ChannelParameter(type=standard_artifacts.ExampleStatistics),
  }
  OUTPUTS = {
      'schema': ChannelParameter(type=standard_artifacts.Schema),
  }
  # TODO(b/139281215): these input / output names have recently been renamed.
  # These compatibility aliases are temporarily provided for backwards
  # compatibility.
  _INPUT_COMPATIBILITY_ALIASES = {
      'stats': 'statistics',
  }
  _OUTPUT_COMPATIBILITY_ALIASES = {
      'output': 'schema',
  }
class ExampleValidatorSpec(ComponentSpec):
  """ExampleValidator component spec."""

  # Optional list of splits excluded from anomaly detection.
  PARAMETERS = {
      'exclude_splits': ExecutionParameter(type=(str, Text), optional=True),
  }
  # Statistics are checked against the schema to surface anomalies.
  INPUTS = {
      'statistics':
          ChannelParameter(type=standard_artifacts.ExampleStatistics),
      'schema':
          ChannelParameter(type=standard_artifacts.Schema),
  }
  OUTPUTS = {
      'anomalies': ChannelParameter(type=standard_artifacts.ExampleAnomalies),
  }
  # TODO(b/139281215): these input / output names have recently been renamed.
  # These compatibility aliases are temporarily provided for backwards
  # compatibility.
  _INPUT_COMPATIBILITY_ALIASES = {
      'stats': 'statistics',
  }
  _OUTPUT_COMPATIBILITY_ALIASES = {
      'output': 'anomalies',
  }
class EvaluatorSpec(ComponentSpec):
  """Evaluator component spec."""

  PARAMETERS = {
      'eval_config': ExecutionParameter(type=tfma.EvalConfig, optional=True),
      # TODO(mdreves): Deprecated, use eval_config.slicing_specs.
      'feature_slicing_spec':
          ExecutionParameter(
              type=evaluator_pb2.FeatureSlicingSpec, optional=True),
      # This parameter is experimental: its interface and functionality may
      # change at any time.
      'fairness_indicator_thresholds':
          ExecutionParameter(type=List[float], optional=True),
      'example_splits': ExecutionParameter(type=(str, Text), optional=True),
      'module_file': ExecutionParameter(type=(str, Text), optional=True),
      'module_path': ExecutionParameter(type=(str, Text), optional=True),
  }
  # Examples and the candidate model are required; a baseline model enables
  # model comparison and a schema aids parsing.
  INPUTS = {
      'examples': ChannelParameter(type=standard_artifacts.Examples),
      'model': ChannelParameter(type=standard_artifacts.Model),
      'baseline_model':
          ChannelParameter(type=standard_artifacts.Model, optional=True),
      'schema':
          ChannelParameter(type=standard_artifacts.Schema, optional=True),
  }
  OUTPUTS = {
      'evaluation': ChannelParameter(type=standard_artifacts.ModelEvaluation),
      'blessing': ChannelParameter(type=standard_artifacts.ModelBlessing),
  }
  # TODO(b/139281215): these input / output names have recently been renamed.
  # These compatibility aliases are temporarily provided for backwards
  # compatibility.
  _INPUT_COMPATIBILITY_ALIASES = {
      'model_exports': 'model',
  }
  _OUTPUT_COMPATIBILITY_ALIASES = {
      'output': 'evaluation',
  }
class ExportSpec(types.ComponentSpec):
  """ComponentSpec for model Export Component."""

  # Name of the export function to invoke.
  PARAMETERS = {
      FUNCTION_NAME_KEY: ExecutionParameter(type=Text),
  }
  # Only the model is required; blessings, pushed-model metadata and the
  # transform graph are optional context for the export.
  INPUTS = {
      MODEL_KEY: ChannelParameter(type=standard_artifacts.Model),
      PIPELINE_CONFIGURATION_KEY:
          ChannelParameter(type=PipelineConfiguration, optional=True),
      MODEL_BLESSING_KEY:
          ChannelParameter(type=standard_artifacts.ModelBlessing,
                           optional=True),
      INFRA_BLESSING_KEY:
          ChannelParameter(type=standard_artifacts.InfraBlessing,
                           optional=True),
      PUSHED_MODEL_KEY:
          ChannelParameter(type=standard_artifacts.PushedModel, optional=True),
      TRANSFORM_GRAPH_KEY:
          ChannelParameter(type=standard_artifacts.TransformGraph,
                           optional=True),
  }
  OUTPUTS = {
      OUTPUT_KEY: ChannelParameter(type=ExportedModel),
  }
class FileBasedExampleGenSpec(ComponentSpec):
  """File-based ExampleGen component spec."""

  PARAMETERS = {
      INPUT_BASE_KEY: ExecutionParameter(type=str),
      INPUT_CONFIG_KEY: ExecutionParameter(type=example_gen_pb2.Input),
      OUTPUT_CONFIG_KEY: ExecutionParameter(type=example_gen_pb2.Output),
      # example_gen_pb2.PayloadFormat enum.
      OUTPUT_DATA_FORMAT_KEY: ExecutionParameter(type=int),
      # example_gen_pb2.FileFormat enum.
      OUTPUT_FILE_FORMAT_KEY: ExecutionParameter(type=int),
      CUSTOM_CONFIG_KEY:
          ExecutionParameter(type=example_gen_pb2.CustomConfig, optional=True),
      RANGE_CONFIG_KEY:
          ExecutionParameter(type=range_config_pb2.RangeConfig, optional=True),
  }
  # Source component: reads from the filesystem, not from upstream channels.
  INPUTS = {}
  OUTPUTS = {
      EXAMPLES_KEY: ChannelParameter(type=standard_artifacts.Examples),
  }
class FileBasedExampleGenSpec(ComponentSpec):
  """File-based ExampleGen component spec."""

  PARAMETERS = {
      'input_base': ExecutionParameter(type=(str, Text)),
      'input_config': ExecutionParameter(type=example_gen_pb2.Input),
      'output_config': ExecutionParameter(type=example_gen_pb2.Output),
      # example_gen_pb2.PayloadType enum.
      'output_data_format': ExecutionParameter(type=int),
      'custom_config':
          ExecutionParameter(type=example_gen_pb2.CustomConfig, optional=True),
      'range_config':
          ExecutionParameter(type=range_config_pb2.RangeConfig, optional=True),
  }
  # Source component: reads from the filesystem, not from upstream channels.
  INPUTS = {}
  OUTPUTS = {
      'examples': ChannelParameter(type=standard_artifacts.Examples),
  }
class TrainerSpec(ComponentSpec):
  """Trainer component spec."""

  # Training/eval arguments are required; exactly one of module_file,
  # run_fn or trainer_fn is expected to identify the user code.
  PARAMETERS = {
      'train_args': ExecutionParameter(type=trainer_pb2.TrainArgs),
      'eval_args': ExecutionParameter(type=trainer_pb2.EvalArgs),
      'module_file': ExecutionParameter(type=(str, Text), optional=True),
      'run_fn': ExecutionParameter(type=(str, Text), optional=True),
      'trainer_fn': ExecutionParameter(type=(str, Text), optional=True),
      'custom_config': ExecutionParameter(type=(str, Text), optional=True),
  }
  INPUTS = {
      'examples': ChannelParameter(type=standard_artifacts.Examples),
      'transform_graph':
          ChannelParameter(type=standard_artifacts.TransformGraph,
                           optional=True),
      'schema':
          ChannelParameter(type=standard_artifacts.Schema, optional=True),
      'base_model':
          ChannelParameter(type=standard_artifacts.Model, optional=True),
      'hyperparameters':
          ChannelParameter(type=standard_artifacts.HyperParameters,
                           optional=True),
  }
  OUTPUTS = {
      'model': ChannelParameter(type=standard_artifacts.Model),
      'model_run': ChannelParameter(type=standard_artifacts.ModelRun)
  }
  # TODO(b/139281215): these input / output names have recently been renamed.
  # These compatibility aliases are temporarily provided for backwards
  # compatibility.
  _INPUT_COMPATIBILITY_ALIASES = {
      'transform_output': 'transform_graph',
  }
  _OUTPUT_COMPATIBILITY_ALIASES = {
      'output': 'model',
  }
class ZenMLTrainerSpec(ComponentSpec):
  """Trainer spec variant that additionally emits test results."""

  PARAMETERS = {
      'train_args': ExecutionParameter(type=trainer_pb2.TrainArgs),
      'eval_args': ExecutionParameter(type=trainer_pb2.EvalArgs),
      'module_file': ExecutionParameter(type=(str, Text), optional=True),
      'run_fn': ExecutionParameter(type=(str, Text), optional=True),
      'trainer_fn': ExecutionParameter(type=(str, Text), optional=True),
      'custom_config': ExecutionParameter(type=(str, Text), optional=True),
  }
  INPUTS = {
      'examples': ChannelParameter(type=Examples),
      'schema': ChannelParameter(type=Schema, optional=True),
      'base_model': ChannelParameter(type=Model, optional=True),
      'transform_graph': ChannelParameter(type=TransformGraph, optional=True),
      'hyperparameters': ChannelParameter(type=HyperParameters, optional=True),
  }
  OUTPUTS = {
      'model': ChannelParameter(type=Model),
      'model_run': ChannelParameter(type=ModelRun),
      # Extra channel (keyed by a project constant) carrying test results.
      constants.TEST_RESULTS: ChannelParameter(type=Examples)
  }
class EvaluatorSpec(ComponentSpec):
  """Evaluator component spec."""

  PARAMETERS = {
      EVAL_CONFIG_KEY:
          ExecutionParameter(type=tfma.EvalConfig, optional=True),
      # TODO(b/181911822): Deprecated, use eval_config.slicing_specs.
      FEATURE_SLICING_SPEC_KEY:
          ExecutionParameter(
              type=evaluator_pb2.FeatureSlicingSpec, optional=True),
      # This parameter is experimental: its interface and functionality may
      # change at any time.
      FAIRNESS_INDICATOR_THRESHOLDS_KEY:
          ExecutionParameter(type=str, optional=True),
      EXAMPLE_SPLITS_KEY: ExecutionParameter(type=str, optional=True),
      MODULE_FILE_KEY: ExecutionParameter(type=str, optional=True),
      MODULE_PATH_KEY: ExecutionParameter(type=str, optional=True),
  }
  INPUTS = {
      EXAMPLES_KEY: ChannelParameter(type=standard_artifacts.Examples),
      MODEL_KEY:
          ChannelParameter(type=standard_artifacts.Model, optional=True),
      BASELINE_MODEL_KEY:
          ChannelParameter(type=standard_artifacts.Model, optional=True),
      SCHEMA_KEY:
          ChannelParameter(type=standard_artifacts.Schema, optional=True),
  }
  OUTPUTS = {
      EVALUATION_KEY:
          ChannelParameter(type=standard_artifacts.ModelEvaluation),
      BLESSING_KEY: ChannelParameter(type=standard_artifacts.ModelBlessing),
  }
  # System annotation describing this component's role.
  TYPE_ANNOTATION = Evaluate
class TunerSpec(ComponentSpec):
  """ComponentSpec for TFX Tuner Component."""

  # User code is located via module_file or tuner_fn; train/eval args are
  # required, tuning parallelism and custom config are optional.
  PARAMETERS = {
      'module_file': ExecutionParameter(type=(str, Text), optional=True),
      'tuner_fn': ExecutionParameter(type=(str, Text), optional=True),
      'train_args': ExecutionParameter(type=trainer_pb2.TrainArgs),
      'eval_args': ExecutionParameter(type=trainer_pb2.EvalArgs),
      'tune_args': ExecutionParameter(type=tuner_pb2.TuneArgs, optional=True),
      'custom_config': ExecutionParameter(type=(str, Text), optional=True),
  }
  INPUTS = {
      'examples': ChannelParameter(type=standard_artifacts.Examples),
      'schema':
          ChannelParameter(type=standard_artifacts.Schema, optional=True),
      'transform_graph':
          ChannelParameter(type=standard_artifacts.TransformGraph,
                           optional=True),
  }
  OUTPUTS = {
      'best_hyperparameters':
          ChannelParameter(type=standard_artifacts.HyperParameters),
  }
class TunerSpec(ComponentSpec):
  """ComponentSpec for TFX Tuner Component."""

  # User code is located via module_file or tuner_fn; train/eval args are
  # required, tuning parallelism and custom config are optional.
  PARAMETERS = {
      MODULE_FILE_KEY: ExecutionParameter(type=(str, Text), optional=True),
      TUNER_FN_KEY: ExecutionParameter(type=(str, Text), optional=True),
      TRAIN_ARGS_KEY: ExecutionParameter(type=trainer_pb2.TrainArgs),
      EVAL_ARGS_KEY: ExecutionParameter(type=trainer_pb2.EvalArgs),
      TUNE_ARGS_KEY:
          ExecutionParameter(type=tuner_pb2.TuneArgs, optional=True),
      CUSTOM_CONFIG_KEY: ExecutionParameter(type=(str, Text), optional=True),
  }
  INPUTS = {
      EXAMPLES_KEY: ChannelParameter(type=standard_artifacts.Examples),
      SCHEMA_KEY:
          ChannelParameter(type=standard_artifacts.Schema, optional=True),
      TRANSFORM_GRAPH_KEY:
          ChannelParameter(type=standard_artifacts.TransformGraph,
                           optional=True),
  }
  OUTPUTS = {
      BEST_HYPERPARAMETERS_KEY:
          ChannelParameter(type=standard_artifacts.HyperParameters),
  }
class _FakeComponentSpecC(types.ComponentSpec):
  """Minimal fake spec used in tests (one parameter, one input, one output)."""

  PARAMETERS = {_COMPONENT_NAME: ExecutionParameter(type=str)}
  INPUTS = {'a': ChannelParameter(type=_ArtifactTypeA)}
  OUTPUTS = {'output': ChannelParameter(type=_ArtifactTypeC)}
def testExecutionParameterTypeCheck(self):
  """Exercises ExecutionParameter.type_check across supported type forms."""
  # Plain int: valid value passes, wrong runtime type raises TypeError.
  int_parameter = ExecutionParameter(type=int)
  int_parameter.type_check('int_parameter', 8)
  with self.assertRaisesRegex(
      TypeError, "Expected type <(class|type) 'int'>"
      " for parameter u?'int_parameter'"):
    int_parameter.type_check('int_parameter', 'string')

  # List[int]: empty and homogeneous lists pass; non-list and lists with a
  # wrongly-typed item are both rejected with distinct messages.
  list_parameter = ExecutionParameter(type=List[int])
  list_parameter.type_check('list_parameter', [])
  list_parameter.type_check('list_parameter', [42])
  with self.assertRaisesRegex(TypeError, 'Expecting a list for parameter'):
    list_parameter.type_check('list_parameter', 42)
  with self.assertRaisesRegex(
      TypeError, "Expecting item type <(class|type) "
      "'int'> for parameter u?'list_parameter'"):
    list_parameter.type_check('list_parameter', [42, 'wrong item'])

  # Dict[str, int]: value types are checked; non-dict values are rejected.
  dict_parameter = ExecutionParameter(type=Dict[str, int])
  dict_parameter.type_check('dict_parameter', {})
  dict_parameter.type_check('dict_parameter', {'key1': 1, 'key2': 2})
  with self.assertRaisesRegex(TypeError, 'Expecting a dict for parameter'):
    dict_parameter.type_check('dict_parameter', 'simple string')
  with self.assertRaisesRegex(
      TypeError, "Expecting value type "
      "<(class|type) 'int'>"):
    dict_parameter.type_check('dict_parameter', {'key1': '1'})

  # Proto-typed parameter: accepts a proto instance, its JSON serialization,
  # or a parseable dict. An unknown field passes here, but a value that
  # cannot be parsed into the proto raises json_format.ParseError, and a
  # non-proto/non-dict value raises TypeError.
  proto_parameter = ExecutionParameter(type=example_gen_pb2.Input)
  proto_parameter.type_check('proto_parameter', example_gen_pb2.Input())
  proto_parameter.type_check(
      'proto_parameter', proto_utils.proto_to_json(example_gen_pb2.Input()))
  proto_parameter.type_check('proto_parameter', {'splits': [{
      'name': 'hello'
  }]})
  proto_parameter.type_check('proto_parameter', {'wrong_field': 42})
  with self.assertRaisesRegex(
      TypeError,
      "Expected type <class 'tfx.proto.example_gen_pb2.Input'>"):
    proto_parameter.type_check('proto_parameter', 42)
  with self.assertRaises(json_format.ParseError):
    proto_parameter.type_check('proto_parameter', {'splits': 42})

  # Placeholders: a channel future value and a simple runtime-info
  # placeholder are accepted for a str parameter; composite placeholder
  # expressions are rejected.
  output_channel = channel.Channel(type=_OutputArtifact)
  placeholder_parameter = ExecutionParameter(type=str)
  placeholder_parameter.type_check(
      'wrapped_channel_placeholder_parameter',
      output_channel.future()[0].value)
  placeholder_parameter.type_check(
      'placeholder_parameter',
      placeholder.runtime_info('platform_config').base_dir)
  with self.assertRaisesRegex(
      TypeError, 'Only simple RuntimeInfoPlaceholders are supported'):
    placeholder_parameter.type_check(
        'placeholder_parameter',
        placeholder.runtime_info('platform_config').base_dir +
        placeholder.exec_property('version'))
def testExecutionParameterTypeCheck(self):
  """Exercises ExecutionParameter.type_check for int, list, dict and proto."""
  # Plain int: valid value passes, wrong runtime type raises TypeError.
  int_parameter = ExecutionParameter(type=int)
  int_parameter.type_check('int_parameter', 8)
  with self.assertRaisesRegex(
      TypeError, "Expected type <(class|type) 'int'>"
      " for parameter u?'int_parameter'"):
    int_parameter.type_check('int_parameter', 'string')

  # List[int]: empty and homogeneous lists pass; non-list and lists with a
  # wrongly-typed item are both rejected with distinct messages.
  list_parameter = ExecutionParameter(type=List[int])
  list_parameter.type_check('list_parameter', [])
  list_parameter.type_check('list_parameter', [42])
  with self.assertRaisesRegex(TypeError, 'Expecting a list for parameter'):
    list_parameter.type_check('list_parameter', 42)
  with self.assertRaisesRegex(
      TypeError, "Expecting item type <(class|type) "
      "'int'> for parameter u?'list_parameter'"):
    list_parameter.type_check('list_parameter', [42, 'wrong item'])

  # Dict[str, int]: value types are checked; non-dict values are rejected.
  dict_parameter = ExecutionParameter(type=Dict[str, int])
  dict_parameter.type_check('dict_parameter', {})
  dict_parameter.type_check('dict_parameter', {'key1': 1, 'key2': 2})
  with self.assertRaisesRegex(TypeError, 'Expecting a dict for parameter'):
    dict_parameter.type_check('dict_parameter', 'simple string')
  with self.assertRaisesRegex(
      TypeError, "Expecting value type "
      "<(class|type) 'int'>"):
    dict_parameter.type_check('dict_parameter', {'key1': '1'})

  # Proto-typed parameter: accepts a proto instance or a parseable dict.
  # An unknown field passes here, but a value that cannot be parsed into the
  # proto raises json_format.ParseError, and a non-proto/non-dict value
  # raises TypeError.
  proto_parameter = ExecutionParameter(type=example_gen_pb2.Input)
  proto_parameter.type_check('proto_parameter', example_gen_pb2.Input())
  proto_parameter.type_check('proto_parameter', {'splits': [{
      'name': 'hello'
  }]})
  proto_parameter.type_check('proto_parameter', {'wrong_field': 42})
  with self.assertRaisesRegex(
      TypeError,
      "Expected type <class 'tfx.proto.example_gen_pb2.Input'>"):
    proto_parameter.type_check('proto_parameter', 42)
  with self.assertRaises(json_format.ParseError):
    proto_parameter.type_check('proto_parameter', {'splits': 42})
def testExecutionParameterTypeCheck(self):
  """Exercises ExecutionParameter.type_check for int, list and dict types.

  Fix: `assertRaisesRegexp` is a deprecated alias of `assertRaisesRegex`
  (removed in Python 3.12); switched to the modern name, matching the other
  tests in this file.
  """
  # Plain int: valid value passes, wrong runtime type raises TypeError.
  int_parameter = ExecutionParameter(type=int)
  int_parameter.type_check('int_parameter', 8)
  with self.assertRaisesRegex(
      TypeError, "Expected type <(class|type) 'int'>"
      " for parameter u?'int_parameter'"):
    int_parameter.type_check('int_parameter', 'string')

  # List[int]: empty and homogeneous lists pass; non-list and lists with a
  # wrongly-typed item are both rejected with distinct messages.
  list_parameter = ExecutionParameter(type=List[int])
  list_parameter.type_check('list_parameter', [])
  list_parameter.type_check('list_parameter', [42])
  with self.assertRaisesRegex(TypeError, 'Expecting a list for parameter'):
    list_parameter.type_check('list_parameter', 42)
  with self.assertRaisesRegex(
      TypeError, "Expecting item type <(class|type) "
      "'int'> for parameter u?'list_parameter'"):
    list_parameter.type_check('list_parameter', [42, 'wrong item'])

  # Dict[Text, int]: value types are checked; non-dict values are rejected.
  dict_parameter = ExecutionParameter(type=Dict[Text, int])
  dict_parameter.type_check('dict_parameter', {})
  dict_parameter.type_check('dict_parameter', {'key1': 1, 'key2': 2})
  with self.assertRaisesRegex(TypeError, 'Expecting a dict for parameter'):
    dict_parameter.type_check('dict_parameter', 'simple string')
  with self.assertRaisesRegex(
      TypeError, "Expecting value type "
      "<(class|type) 'int'>"):
    dict_parameter.type_check('dict_parameter', {'key1': '1'})
class DuplicatePropertyComponentSpec(ComponentSpec):
  """Deliberately malformed spec: 'x' appears in both PARAMETERS and INPUTS.

  Presumably used to verify that ComponentSpec rejects duplicate property
  names — keep the duplication intact.
  """

  PARAMETERS = {'x': ExecutionParameter(type=int)}
  INPUTS = {'x': ChannelParameter(type_name='X')}
  OUTPUTS = {}
class WrongTypeComponentSpecD(ComponentSpec):
  """Deliberately malformed spec: INPUTS holds an ExecutionParameter.

  Presumably used to verify that ComponentSpec rejects wrongly-typed
  declarations — keep the mismatch intact.
  """

  PARAMETERS = {}
  INPUTS = {'x': ExecutionParameter(type=int)}
  OUTPUTS = {}