def test_type_engine():
    t = int
    lt = TypeEngine.to_literal_type(t)
    assert lt.simple == model_types.SimpleType.INTEGER

    t = typing.Dict[str, typing.List[typing.Dict[str, timedelta]]]
    lt = TypeEngine.to_literal_type(t)
    assert lt.map_value_type.collection_type.map_value_type.simple == model_types.SimpleType.DURATION
def __call__(self, *args, **kwargs):
    if len(args) > 0:
        raise AssertionError("Only Keyword Arguments are supported for Workflow executions")

    ctx = FlyteContext.current_context()

    # Handle subworkflows in compilation
    if ctx.compilation_state is not None:
        input_kwargs = self._native_interface.default_inputs_as_kwargs
        input_kwargs.update(kwargs)
        return create_and_link_node(ctx, entity=self, interface=self._native_interface, **input_kwargs)
    elif (
        ctx.execution_state is not None
        and ctx.execution_state.mode == ExecutionState.Mode.LOCAL_WORKFLOW_EXECUTION
    ):
        # We are already in a local execution, just continue the execution context
        return self._local_execute(ctx, **kwargs)
    else:
        # Someone wants to run the workflow function locally. Assume that the inputs given are Python
        # native values. _local_execute will always translate Python native literals to Flyte literals,
        # so no worries there, but it'll return Promise objects.
        #
        # Run some sanity checks. Even though the _local_execute call generally expects inputs to be
        # Promises, we don't have to do the conversion here in this loop, because we don't prevent users
        # from specifying inputs as direct scalars - there's another Promise-generating loop inside
        # _local_execute too.
        for k, v in kwargs.items():
            if k not in self.interface.inputs:
                raise ValueError(f"Received unexpected keyword argument {k}")
            if isinstance(v, Promise):
                raise ValueError(f"Received a promise for a workflow call, when expecting a native value for {k}")

        with ctx.new_execution_context(mode=ExecutionState.Mode.LOCAL_WORKFLOW_EXECUTION) as ctx:
            result = self._local_execute(ctx, **kwargs)

        expected_outputs = len(self._native_interface.outputs)
        if expected_outputs == 0:
            if result is None or isinstance(result, VoidPromise):
                return None
            else:
                raise Exception(f"Workflow local execution expected 0 outputs but received {result}")

        if (expected_outputs > 1 and len(result) == expected_outputs) or (
            expected_outputs == 1 and result is not None
        ):
            if isinstance(result, Promise):
                v = [v for k, v in self._native_interface.outputs.items()][0]
                return TypeEngine.to_python_value(ctx, result.val, v)
            else:
                for prom in result:
                    if not isinstance(prom, Promise):
                        raise Exception("should be promises")
                native_list = [
                    TypeEngine.to_python_value(ctx, promise.val, self._native_interface.outputs[promise.var])
                    for promise in result
                ]
                return tuple(native_list)

        raise ValueError("expected outputs and actual outputs do not match")
def binding_data_from_python_std(
    ctx: _flyte_context.FlyteContext,
    expected_literal_type: _type_models.LiteralType,
    t_value: typing.Any,
    t_value_type: type,
) -> _literals_models.BindingData:
    # This handles the case where the given value is the output of another task
    if isinstance(t_value, Promise):
        if not t_value.is_ready:
            return _literals_models.BindingData(promise=t_value.ref)

    elif isinstance(t_value, VoidPromise):
        raise AssertionError(
            f"Cannot pass output from task {t_value.task_name} that produces no outputs to a downstream task"
        )

    elif isinstance(t_value, list):
        if expected_literal_type.collection_type is None:
            raise AssertionError(f"this should be a list and it is not: {type(t_value)} vs {expected_literal_type}")

        sub_type = ListTransformer.get_sub_type(t_value_type)
        collection = _literals_models.BindingDataCollection(
            bindings=[
                binding_data_from_python_std(ctx, expected_literal_type.collection_type, t, sub_type)
                for t in t_value
            ]
        )
        return _literals_models.BindingData(collection=collection)

    elif isinstance(t_value, dict):
        if (
            expected_literal_type.map_value_type is None
            and expected_literal_type.simple != _type_models.SimpleType.STRUCT
        ):
            raise AssertionError(
                f"this should be a Dictionary type and it is not: {type(t_value)} vs {expected_literal_type}"
            )

        k_type, v_type = DictTransformer.get_dict_types(t_value_type)
        if expected_literal_type.simple == _type_models.SimpleType.STRUCT:
            lit = TypeEngine.to_literal(ctx, t_value, type(t_value), expected_literal_type)
            return _literals_models.BindingData(scalar=lit.scalar)
        else:
            m = _literals_models.BindingDataMap(
                bindings={
                    k: binding_data_from_python_std(ctx, expected_literal_type.map_value_type, v, v_type)
                    for k, v in t_value.items()
                }
            )
            return _literals_models.BindingData(map=m)

    # This is the scalar case - e.g. my_task(in1=5)
    scalar = TypeEngine.to_literal(ctx, t_value, t_value_type, expected_literal_type).scalar
    return _literals_models.BindingData(scalar=scalar)
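# A minimal sketch of the scalar branch above, assuming an active FlyteContext; the INTEGER literal
# type mirrors the test_type_engine assertions earlier in this section.
ctx = _flyte_context.FlyteContext.current_context()
int_lt = _type_models.LiteralType(simple=_type_models.SimpleType.INTEGER)
bd = binding_data_from_python_std(ctx, int_lt, 5, int)
assert bd.scalar is not None  # a native value binds as a scalar, not as a promise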
def extract_value(
    ctx: FlyteContext, input_val: Any, val_type: type, flyte_literal_type: _type_models.LiteralType
) -> _literal_models.Literal:
    if isinstance(input_val, list):
        if flyte_literal_type.collection_type is None:
            raise Exception(f"Not a collection type {flyte_literal_type} but got a list {input_val}")
        try:
            sub_type = ListTransformer.get_sub_type(val_type)
        except ValueError:
            if len(input_val) == 0:
                raise
            sub_type = type(input_val[0])
        literals = [extract_value(ctx, v, sub_type, flyte_literal_type.collection_type) for v in input_val]
        return _literal_models.Literal(collection=_literal_models.LiteralCollection(literals=literals))
    elif isinstance(input_val, dict):
        if (
            flyte_literal_type.map_value_type is None
            and flyte_literal_type.simple != _type_models.SimpleType.STRUCT
        ):
            raise Exception(f"Not a map type {flyte_literal_type} but got a map {input_val}")
        k_type, sub_type = DictTransformer.get_dict_types(val_type)
        if flyte_literal_type.simple == _type_models.SimpleType.STRUCT:
            return TypeEngine.to_literal(ctx, input_val, type(input_val), flyte_literal_type)
        else:
            literals = {
                k: extract_value(ctx, v, sub_type, flyte_literal_type.map_value_type)
                for k, v in input_val.items()
            }
            return _literal_models.Literal(map=_literal_models.LiteralMap(literals=literals))
    elif isinstance(input_val, Promise):
        # This handles the case where the argument is the output of another task, e.g. in2=a
        return input_val.val
    elif isinstance(input_val, VoidPromise):
        raise AssertionError(
            f"Outputs of a non-output producing task {input_val.task_name} cannot be passed to another task."
        )
    else:
        # This handles native values, e.g. the 5 in my_task(in1=5)
        return TypeEngine.to_literal(ctx, input_val, val_type, flyte_literal_type)
def test_comparison_lits():
    px = Promise("x", TypeEngine.to_literal(None, 5, int, None))
    py = Promise("y", TypeEngine.to_literal(None, 8, int, None))

    def eval_expr(expr, expected: bool):
        print(f"{expr} evals to {expr.eval()}")
        assert expected == expr.eval()

    eval_expr(px == py, False)
    eval_expr(px < py, True)
    eval_expr((px == py) & (px < py), False)
    eval_expr(((px == py) & (px < py)) | (px > py), False)
    eval_expr(px < 5, False)
    eval_expr(px >= 5, True)
    eval_expr(py >= 5, True)
def test_named_tuple():
    t = typing.NamedTuple("Outputs", [("x_str", str), ("y_int", int)])
    var_map = TypeEngine.named_tuple_to_variable_map(t)
    assert var_map.variables["x_str"].type.simple == model_types.SimpleType.STRING
    assert var_map.variables["y_int"].type.simple == model_types.SimpleType.INTEGER
def _workflow_fn_outputs_to_promise(
    ctx: FlyteContext,
    native_outputs: typing.Dict[str, type],  # Actually an OrderedDict
    typed_outputs: Dict[str, _interface_models.Variable],
    outputs: Union[Any, Tuple[Any]],
) -> List[Promise]:
    if len(native_outputs) == 1:
        if isinstance(outputs, tuple):
            if len(outputs) != 1:
                raise AssertionError(
                    f"The Workflow specification indicates only one return value, received {len(outputs)}"
                )
        else:
            outputs = (outputs,)

    if len(native_outputs) > 1:
        if not isinstance(outputs, tuple) or len(native_outputs) != len(outputs):
            # Length check, clean up exception
            raise AssertionError(
                f"The workflow specification indicates {len(native_outputs)} return vals, but received {len(outputs)}"
            )

    # This recasts the Promises provided by the outputs of the workflow's tasks into the correct output names
    # of the workflow itself
    return_vals = []
    for (k, t), v in zip(native_outputs.items(), outputs):
        if isinstance(v, Promise):
            return_vals.append(v.with_var(k))
        else:
            # Found a return type that is not a promise, so we need to transform it
            var = typed_outputs[k]
            return_vals.append(Promise(var=k, val=TypeEngine.to_literal(ctx, v, t, var.type)))
    return return_vals
def transform_inputs_to_parameters(
    ctx: context_manager.FlyteContext, interface: Interface
) -> _interface_models.ParameterMap:
    """
    Transforms the given interface (with inputs) to a Parameter Map with defaults set
    :param ctx: the current FlyteContext
    :param interface: the interface object
    """
    if interface is None or interface.inputs_with_defaults is None:
        return _interface_models.ParameterMap({})

    inputs_vars = transform_variable_map(interface.inputs)
    params = {}
    inputs_with_def = interface.inputs_with_defaults
    for k, v in inputs_vars.items():
        val, _default = inputs_with_def[k]
        required = _default is None
        default_lv = None
        if _default is not None:
            default_lv = TypeEngine.to_literal(ctx, _default, python_type=interface.inputs[k], expected=v.type)
        params[k] = _interface_models.Parameter(var=v, default=default_lv, required=required)
    return _interface_models.ParameterMap(params)
def record_outputs(**kwargs) -> str:
    """
    Use this method to record outputs from a notebook. It will convert all outputs to a Flyte-understandable
    format. For files and directories, please use FlyteFile or FlyteDirectory, or wrap your paths in these
    types.
    """
    if kwargs is None:
        return ""

    m = {}
    ctx = FlyteContext.current_context()
    for k, v in kwargs.items():
        expected = TypeEngine.to_literal_type(type(v))
        lit = TypeEngine.to_literal(ctx, python_type=type(v), python_val=v, expected=expected)
        m[k] = lit
    return LiteralMap(literals=m).to_flyte_idl()
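# A minimal usage sketch, assuming this is called from the final cell of a papermill-executed
# notebook; the output names and values here are illustrative.
record_outputs(square=4 * 4, label="done")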
def test_engine_file_output():
    basic_blob_type = _core_types.BlobType(
        format="",
        dimensionality=_core_types.BlobType.BlobDimensionality.SINGLE,
    )

    fs = FileAccessProvider(local_sandbox_dir="/tmp/flytetesting")
    with context_manager.FlyteContext.current_context().new_file_access_context(file_access_provider=fs) as ctx:
        # Write some text to a file not in that directory above
        test_file_location = "/tmp/sample.txt"
        with open(test_file_location, "w") as fh:
            fh.write("Hello World\n")

        lit = TypeEngine.to_literal(ctx, test_file_location, os.PathLike, LiteralType(blob=basic_blob_type))

        # Since we're using local as remote, we should be able to just read the file from the 'remote' location.
        with open(lit.scalar.blob.uri, "r") as fh:
            assert fh.readline() == "Hello World\n"

        # We should also be able to turn the literal back into a regular Python native value.
        redownloaded_local_file_location = TypeEngine.to_python_value(ctx, lit, os.PathLike)
        with open(redownloaded_local_file_location, "r") as fh:
            assert fh.readline() == "Hello World\n"
def test_type_resolution():
    assert type(TypeEngine.get_transformer(typing.List[int])) == ListTransformer
    assert type(TypeEngine.get_transformer(typing.List)) == ListTransformer
    assert type(TypeEngine.get_transformer(list)) == ListTransformer

    assert type(TypeEngine.get_transformer(typing.Dict[str, int])) == DictTransformer
    assert type(TypeEngine.get_transformer(typing.Dict)) == DictTransformer
    assert type(TypeEngine.get_transformer(dict)) == DictTransformer

    assert type(TypeEngine.get_transformer(int)) == SimpleTransformer

    assert type(TypeEngine.get_transformer(os.PathLike)) == PathLikeTransformer
def unwrap_literal_map_and_execute(
    self, ctx: FlyteContext, input_literal_map: _literal_models.LiteralMap
) -> Union[VoidPromise, _literal_models.LiteralMap, _dynamic_job.DynamicJobSpec]:
    """
    Please see the implementation of the dispatch_execute function in the real task.
    """
    # Invoked before the task is executed
    # Translate the input literals to Python native
    native_inputs = TypeEngine.literal_map_to_kwargs(ctx, input_literal_map, self.interface.inputs)

    logger.info(f"Invoking {self.name} with inputs: {native_inputs}")
    try:
        native_outputs = self.execute(**native_inputs)
    except Exception as e:
        logger.exception(f"Exception when executing {e}")
        raise e
    logger.info(f"Task executed successfully in user level, outputs: {native_outputs}")

    expected_output_names = list(self.interface.outputs.keys())
    if len(expected_output_names) == 1:
        native_outputs_as_map = {expected_output_names[0]: native_outputs}
    elif len(expected_output_names) == 0:
        native_outputs_as_map = {}
    else:
        native_outputs_as_map = {expected_output_names[i]: native_outputs[i] for i, _ in enumerate(native_outputs)}

    # We manually construct a LiteralMap here because task inputs and outputs actually violate the assumption
    # built into the IDL that all the values of a literal map are of the same type.
    literals = {}
    for k, v in native_outputs_as_map.items():
        literal_type = self.typed_interface.outputs[k].type
        py_type = self.interface.outputs[k]
        if isinstance(v, tuple):
            raise AssertionError(f"Output({k}) in task {self.name} received a tuple {v}, instead of {py_type}")
        literals[k] = TypeEngine.to_literal(ctx, v, py_type, literal_type)

    outputs_literal_map = _literal_models.LiteralMap(literals=literals)
    # After the execute has been successfully completed
    return outputs_literal_map
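# A small illustration of the comment above: unlike a typed map value, an outputs LiteralMap can
# legitimately mix value types. A hedged sketch with hypothetical output names.
ctx = FlyteContext.current_context()
mixed = _literal_models.LiteralMap(
    literals={
        "count": TypeEngine.to_literal(ctx, 5, int, TypeEngine.to_literal_type(int)),
        "label": TypeEngine.to_literal(ctx, "ok", str, TypeEngine.to_literal_type(str)),
    }
)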
def test_file_format_getting_python_value():
    transformer = TypeEngine.get_transformer(FlyteFile)

    ctx = FlyteContext.current_context()

    # This file probably won't exist, but it's okay. It won't be downloaded unless we try to read the thing returned.
    lv = Literal(
        scalar=Scalar(
            blob=Blob(metadata=BlobMetadata(type=BlobType(format="txt", dimensionality=0)), uri="file:///tmp/test")
        )
    )

    pv = transformer.to_python_value(ctx, lv, expected_python_type=FlyteFile["txt"])
    assert isinstance(pv, FlyteFile)
    assert pv.extension() == "txt"
def test_file_formats_getting_literal_type():
    transformer = TypeEngine.get_transformer(FlyteFile)

    lt = transformer.get_literal_type(FlyteFile)
    assert lt.blob.format == ""

    # Works with formats that we define
    lt = transformer.get_literal_type(FlyteFile["txt"])
    assert lt.blob.format == "txt"

    lt = transformer.get_literal_type(FlyteFile[typing.TypeVar("jpg")])
    assert lt.blob.format == "jpg"

    # An unparameterized FlyteFile falls back to the default (empty) format
    lt = transformer.get_literal_type(FlyteFile)
    assert lt.blob.format == ""

    lt = transformer.get_literal_type(FlyteFile[typing.TypeVar(".png")])
    assert lt.blob.format == "png"
def execute(self, **kwargs) -> Any:
    """
    TODO: Figure out how to share FlyteContext ExecutionParameters with the notebook kernel (as the notebook
          kernel is executed in a separate Python process).

    For Spark, the notebooks today need to use new_session, or just the getOrCreate session, and get a handle
    to the singleton.
    """
    logging.info(f"Hijacking the call for task-type {self.task_type}, to call notebook.")
    # Execute the notebook via papermill.
    pm.execute_notebook(self._notebook_path, self.output_notebook_path, parameters=kwargs)

    outputs = self.extract_outputs(self.output_notebook_path)
    self.render_nb_html(self.output_notebook_path, self.rendered_output_path)

    m = {}
    if outputs:
        m = outputs.literals

    output_list = []
    for k, type_v in self.python_interface.outputs.items():
        if k == self._IMPLICIT_OP_NOTEBOOK:
            output_list.append(self.output_notebook_path)
        elif k == self._IMPLICIT_RENDERED_NOTEBOOK:
            output_list.append(self.rendered_output_path)
        elif k in m:
            v = TypeEngine.to_python_value(ctx=FlyteContext.current_context(), lv=m[k], expected_python_type=type_v)
            output_list.append(v)
        else:
            raise RuntimeError(f"Expected output {k} of type {type_v} not found in the notebook outputs")

    return tuple(output_list)
def _local_execute(self, ctx: FlyteContext, **kwargs) -> Union[Tuple[Promise], Promise, VoidPromise]:
    """
    Performs local execution of a workflow. kwargs are expected to be Promises for the most part (unless
    someone has hardcoded in my_wf(input_1=5) or something).
    :param ctx: The FlyteContext
    :param kwargs: parameters for the workflow itself
    """
    logger.info(f"Executing Workflow {self._name}, mode {ctx.execution_state.mode}")

    # This is done to support the invariant that Workflow local executions always work with Promise objects
    # holding Flyte literal values. Even in a wf, a user can call a sub-workflow with a Python native value.
    for k, v in kwargs.items():
        if not isinstance(v, Promise):
            t = self._native_interface.inputs[k]
            kwargs[k] = Promise(var=k, val=TypeEngine.to_literal(ctx, v, t, self.interface.inputs[k].type))

    function_outputs = self.execute(**kwargs)
    if (
        isinstance(function_outputs, VoidPromise)
        or function_outputs is None
        or len(self.python_interface.outputs) == 0
    ):
        # The reason this is here is because a workflow function may return a task that doesn't return anything
        #   def wf():
        #       return t1()
        # or it may not return at all
        #   def wf():
        #       t1()
        # In the former case we get the task's VoidPromise, in the latter we get None
        return VoidPromise(self.name)

    # TODO: Can we refactor the task code to be similar to what's in this function?
    promises = _workflow_fn_outputs_to_promise(
        ctx, self._native_interface.outputs, self.interface.outputs, function_outputs
    )
    # TODO: With the native interface, create_task_output should be able to derive the typed interface, and it
    # should be able to do the conversion of the output of the execute() call directly.
    return create_task_output(promises, self._native_interface)
def to_literal(
    self,
    ctx: FlyteContext,
    python_val: pandas.DataFrame,
    python_type: Type[pandas.DataFrame],
    expected: LiteralType,
) -> Literal:
    local_dir = ctx.file_access.get_random_local_directory()
    w = PandasSchemaWriter(local_dir=local_dir, cols=None, fmt=SchemaFormat.PARQUET)
    w.write(python_val)
    remote_path = ctx.file_access.get_random_remote_directory()
    ctx.file_access.put_data(local_dir, remote_path, is_multipart=True)
    return Literal(scalar=Scalar(schema=Schema(remote_path, self._get_schema_type())))

def to_python_value(
    self, ctx: FlyteContext, lv: Literal, expected_python_type: Type[pandas.DataFrame]
) -> pandas.DataFrame:
    if not (lv and lv.scalar and lv.scalar.schema):
        return pandas.DataFrame()
    local_dir = ctx.file_access.get_random_local_directory()
    ctx.file_access.download_directory(lv.scalar.schema.uri, local_dir)
    r = PandasSchemaReader(local_dir=local_dir, cols=None, fmt=SchemaFormat.PARQUET)
    return r.all()


SchemaEngine.register_handler(
    SchemaHandler("pandas-dataframe-schema", pandas.DataFrame, PandasSchemaReader, PandasSchemaWriter)
)
TypeEngine.register(PandasDataFrameTransformer())
    )
    writer = schema.open(type(python_val))
    writer.write(python_val)

    h = SchemaEngine.get_handler(type(python_val))
    if not h.handles_remote_io:
        ctx.file_access.put_data(schema.local_path, schema.remote_path, is_multipart=True)

    return Literal(scalar=Scalar(schema=Schema(schema.remote_path, self._get_schema_type(python_type))))

def to_python_value(
    self, ctx: FlyteContext, lv: Literal, expected_python_type: Type[FlyteSchema]
) -> FlyteSchema:
    if not (lv and lv.scalar and lv.scalar.schema):
        raise AssertionError("Can only convert a literal schema to a FlyteSchema")

    def downloader(x, y):
        ctx.file_access.download_directory(x, y)

    return expected_python_type(
        local_path=ctx.file_access.get_random_local_directory(),
        remote_path=lv.scalar.schema.uri,
        downloader=downloader,
        supported_mode=SchemaOpenMode.READ,
    )


TypeEngine.register(FlyteSchemaTransformer())
def transform_type(x: type, description: str = None) -> _interface_models.Variable:
    return _interface_models.Variable(type=TypeEngine.to_literal_type(x), description=description)
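# A minimal usage sketch: mirroring the test_type_engine assertions above, a builtin int should
# produce an INTEGER Variable (the model_types alias follows the earlier test snippets).
v = transform_type(int, description="an integer input")
assert v.type.simple == model_types.SimpleType.INTEGER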
def to_python_value(
    self, ctx: FlyteContext, lv: Literal, expected_python_type: Type[MyDataset]
) -> MyDataset:
    """
    In this function we want to be able to re-hydrate the custom object from a Flyte Literal value.
    """
    # Step 1: let's download the remote data locally
    local_dir = ctx.file_access.get_random_local_directory()
    ctx.file_access.download_directory(lv.scalar.blob.uri, local_dir)
    # Step 2: create the MyDataset object
    return MyDataset(base_dir=local_dir)


# %%
# Before we can use MyDataset in our tasks, we need to let flytekit know that ``MyDataset`` should be considered
# a valid type. This is done using the :py:func:`flytekit.annotated.type_engine.TypeEngine.register` function.
TypeEngine.register(MyDatasetTransformer())


# %%
# Now the new type should be ready to use. Let us write an example generator and consumer for this new datatype.
@task
def generate() -> MyDataset:
    d = MyDataset()
    for i in range(3):
        fp = d.new_file(f"x{i}")
        with open(fp, "w") as f:
            f.write(f"Contents of file{i}")

    return d
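# %%
# A minimal sketch of a matching consumer, assuming ``MyDataset`` exposes the directory it wraps via a
# ``base_dir`` attribute (only the constructor argument appears in this snippet).
import os


@task
def consume(d: MyDataset) -> str:
    s = ""
    # Read back the files written by generate(), in a stable order
    for fname in sorted(os.listdir(d.base_dir)):
        with open(os.path.join(d.base_dir, fname), "r") as f:
            s += f.read()
    return s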
def to_python_value(
    self, ctx: FlyteContext, lv: Literal, expected_python_type: typing.Type[FlyteDirectory]
) -> FlyteDirectory:
    uri = lv.scalar.blob.uri

    # This is a local file path, like /usr/local/my_file; don't mess with it. Certainly, downloading it doesn't
    # make any sense.
    if not ctx.file_access.is_remote(uri):
        return expected_python_type(uri)

    # For the remote case, return a FlyteDirectory object that can download
    local_folder = ctx.file_access.get_random_local_directory()

    def _downloader():
        return ctx.file_access.get_data(uri, local_folder, is_multipart=True)

    expected_format = self.get_format(expected_python_type)

    fd = FlyteDirectory[expected_format](local_folder, _downloader)
    fd._remote_source = uri
    return fd


TypeEngine.register(FlyteDirToMultipartBlobTransformer())
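# A hedged sketch of the lazy-download behavior above, borrowing the Literal construction pattern from
# the FlyteFile test earlier in this section; the s3 URI is illustrative, and nothing is fetched until
# the _downloader is actually invoked.
ctx = FlyteContext.current_context()
lv = Literal(
    scalar=Scalar(
        blob=Blob(
            metadata=BlobMetadata(type=BlobType(format="", dimensionality=1)),  # 1 == MULTIPART
            uri="s3://my-bucket/some/dir",
        )
    )
)
fd = FlyteDirToMultipartBlobTransformer().to_python_value(ctx, lv, expected_python_type=FlyteDirectory)
assert fd._remote_source == "s3://my-bucket/some/dir"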
def dispatch_execute(
    self, ctx: FlyteContext, input_literal_map: _literal_models.LiteralMap
) -> Union[_literal_models.LiteralMap, _dynamic_job.DynamicJobSpec]:
    """
    This method translates Flyte's type-system-based input values and invokes the actual call to the executor.
    This method is also invoked during runtime.

    * ``VoidPromise`` is returned when the task itself declares no outputs.
    * ``LiteralMap`` is returned when the task declares one or more outputs. Individual outputs may be None.
    * ``DynamicJobSpec`` is returned when a dynamic workflow is executed.
    """
    # Invoked before the task is executed
    new_user_params = self.pre_execute(ctx.user_space_params)

    # Create another execution context with the new user params, but let's keep the same working dir
    with ctx.new_execution_context(
        mode=ctx.execution_state.mode,
        execution_params=new_user_params,
        working_dir=ctx.execution_state.working_dir,
    ) as exec_ctx:
        # TODO: We could support default values here too - but that's not part of the plan right now
        # Translate the input literals to Python native
        native_inputs = TypeEngine.literal_map_to_kwargs(exec_ctx, input_literal_map, self.python_interface.inputs)

        # TODO: The logger should auto-inject the current context information to indicate if the task is running
        # within a workflow or a subworkflow, etc.
        logger.info(f"Invoking {self.name} with inputs: {native_inputs}")
        native_outputs = None
        try:
            native_outputs = self.execute(**native_inputs)
        except Exception as e:
            logger.exception(f"Exception when executing {e}")
            raise e
        logger.info(f"Task executed successfully in user level, outputs: {native_outputs}")

        # Let's run the post_execute method. This may result in an IgnoreOutputs exception, which is
        # bubbled up to be handled at the callee layer.
        native_outputs = self.post_execute(new_user_params, native_outputs)

        # Short circuit the translation to literal map because what's returned may be a dj spec (or an
        # already-constructed LiteralMap if the dynamic task was a no-op), not Python native values
        if isinstance(native_outputs, (_literal_models.LiteralMap, _dynamic_job.DynamicJobSpec)):
            return native_outputs

        expected_output_names = list(self.interface.outputs.keys())
        if len(expected_output_names) == 1:
            # Here we have to handle the fact that the task could've been declared with a typing.NamedTuple of
            # length one. That convention is used for naming outputs - and single-length-NamedTuples are
            # particularly troublesome, but elegant handling of them is not a high priority.
            # Again, we're using the output_tuple_name as a proxy.
            if self.python_interface.output_tuple_name and isinstance(native_outputs, tuple):
                native_outputs_as_map = {expected_output_names[0]: native_outputs[0]}
            else:
                native_outputs_as_map = {expected_output_names[0]: native_outputs}
        elif len(expected_output_names) == 0:
            native_outputs_as_map = {}
        else:
            native_outputs_as_map = {
                expected_output_names[i]: native_outputs[i] for i, _ in enumerate(native_outputs)
            }

        # We manually construct a LiteralMap here because task inputs and outputs actually violate the assumption
        # built into the IDL that all the values of a literal map are of the same type.
        literals = {}
        for k, v in native_outputs_as_map.items():
            literal_type = self.interface.outputs[k].type
            py_type = self.get_type_for_output_var(k, v)
            if isinstance(v, tuple):
                raise AssertionError(f"Output({k}) in task {self.name} received a tuple {v}, instead of {py_type}")
            try:
                literals[k] = TypeEngine.to_literal(exec_ctx, v, py_type, literal_type)
            except Exception as e:
                raise AssertionError(f"failed to convert return value for var {k}") from e

        outputs_literal_map = _literal_models.LiteralMap(literals=literals)
        # After the execute has been successfully completed
        return outputs_literal_map
        blob=Blob(metadata=meta, uri=remote_path or source_path)))

def to_python_value(
    self, ctx: FlyteContext, lv: Literal, expected_python_type: typing.Type[FlyteFile]
) -> FlyteFile:
    uri = lv.scalar.blob.uri

    # This is a local file path, like /usr/local/my_file; don't mess with it. Certainly, downloading it doesn't
    # make any sense.
    if not ctx.file_access.is_remote(uri):
        return expected_python_type(uri)

    # For the remote case, return a FlyteFile object that can download
    local_path = ctx.file_access.get_random_local_path(uri)

    def _downloader():
        return ctx.file_access.get_data(uri, local_path, is_multipart=False)

    expected_format = FlyteFilePathTransformer.get_format(expected_python_type)
    ff = FlyteFile[expected_format](local_path, _downloader)
    ff._remote_source = uri
    return ff


TypeEngine.register(FlyteFilePathTransformer())
def get_literal_type(self, t: Type[_params.ParameterRangeOneOf]) -> LiteralType:
    return primitives.Generic.to_flyte_literal_type()

def to_literal(
    self,
    ctx: FlyteContext,
    python_val: _params.ParameterRangeOneOf,
    python_type: Type[_hpo_job_model.HyperparameterTuningJobConfig],
    expected: LiteralType,
) -> Literal:
    d = MessageToDict(python_val.to_flyte_idl())
    return DictTransformer.dict_to_generic_literal(d)

def to_python_value(
    self, ctx: FlyteContext, lv: Literal, expected_python_type: Type[_params.ParameterRangeOneOf]
) -> _params.ParameterRangeOneOf:
    if lv and lv.scalar and lv.scalar.generic is not None:
        d = json.loads(json_format.MessageToJson(lv.scalar.generic))
        o = _pb2_params.ParameterRangeOneOf()
        o = json_format.ParseDict(d, o)
        return _params.ParameterRangeOneOf.from_flyte_idl(o)
    return None


# %%
# Register the types
TypeEngine.register(HPOTuningJobConfigTransformer())
TypeEngine.register(ParameterRangesTransformer())