def deserialize_properties(props_struct: struct_pb2.Struct) -> Any:
    """
    Deserializes a protobuf `struct_pb2.Struct` into a Python dictionary containing normal
    Python types.
    """
    # Check out this link for details on what sort of types Protobuf is going to generate:
    # https://developers.google.com/protocol-buffers/docs/reference/python-generated
    #
    # We assume that we are deserializing properties that we got from a Resource RPC endpoint,
    # which has type `Struct` in our gRPC proto definition.
    if _special_sig_key in props_struct:
        if props_struct[_special_sig_key] == _special_asset_sig:
            # This is an asset. Re-hydrate this object into an Asset.
            if "path" in props_struct:
                return known_types.new_file_asset(props_struct["path"])
            if "text" in props_struct:
                return known_types.new_string_asset(props_struct["text"])
            if "uri" in props_struct:
                return known_types.new_remote_asset(props_struct["uri"])
            raise AssertionError("Invalid asset encountered when unmarshaling resource property")
        if props_struct[_special_sig_key] == _special_archive_sig:
            # This is an archive. Re-hydrate this object into an Archive.
            if "assets" in props_struct:
                return known_types.new_asset_archive(deserialize_property(props_struct["assets"]))
            if "path" in props_struct:
                return known_types.new_file_archive(props_struct["path"])
            if "uri" in props_struct:
                return known_types.new_remote_archive(props_struct["uri"])
            raise AssertionError("Invalid archive encountered when unmarshaling resource property")
        if props_struct[_special_sig_key] == _special_secret_sig:
            return {
                _special_sig_key: _special_secret_sig,
                "value": deserialize_property(props_struct["value"])
            }
        raise AssertionError("Unrecognized signature when unmarshaling resource property")

    # Struct is duck-typed like a dictionary, so we can iterate over it in the normal ways. Note
    # that if the struct had any secret properties, we push the secretness of the object up to us
    # since we can only set secret outputs on top level properties.
    output = {}
    for k, v in list(props_struct.items()):
        value = deserialize_property(v)
        # We treat values that deserialize to "None" as if they don't exist.
        if value is not None:
            output[k] = value

    return output
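# A minimal sketch (not part of the SDK) of the wire shape the asset branch above expects:
# a Struct carrying the special signature key plus an asset-specific field such as "path".
# The two hex constants are assumed stand-ins for _special_sig_key and _special_asset_sig;
# inside the SDK the module-level constants are used directly.
from google.protobuf import struct_pb2

SIG_KEY = "4dabf18193072939515e22adb298388d"    # assumed value of _special_sig_key
ASSET_SIG = "c44067f5952c0a294b673a41bacd8c17"  # assumed value of _special_asset_sig

asset_struct = struct_pb2.Struct()
asset_struct.update({SIG_KEY: ASSET_SIG, "path": "./site/index.html"})

# Struct is duck-typed like a dict, which is what the membership tests and indexing
# in deserialize_properties rely on.
assert SIG_KEY in asset_struct
assert asset_struct["path"] == "./site/index.html"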
def resolve_outputs(res: 'Resource',
                    serialized_props: struct_pb2.Struct,
                    outputs: struct_pb2.Struct,
                    deps: Mapping[str, Set['Resource']],
                    resolvers: Dict[str, Resolver],
                    transform_using_type_metadata: bool = False):

    # Produce a combined set of property states, starting with inputs and then applying
    # outputs. If the same property exists in the inputs and outputs states, the output wins.
    all_properties = {}
    # Get the resource's output types, so we can convert dicts from the engine into actual
    # instantiated output types or primitive types into enums as needed.
    resource_cls = type(res)
    types = _types.resource_types(resource_cls)
    translate, translate_to_pass = res.translate_output_property, res.translate_output_property
    if transform_using_type_metadata:
        pulumi_to_py_names = _types.resource_pulumi_to_py_names(resource_cls)
        translate = lambda k: pulumi_to_py_names.get(k) or k
        translate_to_pass = lambda k: k

    for key, value in deserialize_properties(outputs).items():
        # Outputs coming from the provider are NOT translated. Do so here.
        translated_key = translate(key)
        translated_value = translate_output_properties(
            value, translate_to_pass, types.get(key), transform_using_type_metadata)
        log.debug(f"incoming output property translated: {key} -> {translated_key}")
        log.debug(f"incoming output value translated: {value} -> {translated_value}")
        all_properties[translated_key] = translated_value

    if not settings.is_dry_run() or settings.is_legacy_apply_enabled():
        for key, value in list(serialized_props.items()):
            translated_key = translate(key)
            if translated_key not in all_properties:
                # input prop the engine didn't give us a final value for. Just use the value
                # passed into the resource by the user.
                all_properties[translated_key] = translate_output_properties(
                    deserialize_property(value), translate_to_pass, types.get(key),
                    transform_using_type_metadata)

    resolve_properties(resolvers, all_properties, deps)
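# A minimal sketch of the two key-translation strategies selected above. The mapping below is a
# hypothetical stand-in for what _types.resource_pulumi_to_py_names(resource_cls) would return
# for a generated resource class.
pulumi_to_py_names = {"instanceType": "instance_type", "tagsAll": "tags_all"}

# With transform_using_type_metadata=True, top-level keys are renamed via the generated type
# metadata, while nested keys are passed through untranslated (translate_to_pass is the identity).
translate = lambda k: pulumi_to_py_names.get(k) or k
translate_to_pass = lambda k: k

assert translate("instanceType") == "instance_type"
assert translate("someUnknownKey") == "someUnknownKey"  # unmapped keys fall through unchanged
assert translate_to_pass("instanceType") == "instanceType"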
def deserialize_properties(props_struct: struct_pb2.Struct, keep_unknowns: Optional[bool] = None) -> Any:
    """
    Deserializes a protobuf `struct_pb2.Struct` into a Python dictionary containing normal
    Python types.
    """
    # Check out this link for details on what sort of types Protobuf is going to generate:
    # https://developers.google.com/protocol-buffers/docs/reference/python-generated
    #
    # We assume that we are deserializing properties that we got from a Resource RPC endpoint,
    # which has type `Struct` in our gRPC proto definition.
    if _special_sig_key in props_struct:
        from .. import FileAsset, StringAsset, RemoteAsset, AssetArchive, FileArchive, RemoteArchive  # pylint: disable=import-outside-toplevel
        if props_struct[_special_sig_key] == _special_asset_sig:
            # This is an asset. Re-hydrate this object into an Asset.
            if "path" in props_struct:
                return FileAsset(props_struct["path"])
            if "text" in props_struct:
                return StringAsset(props_struct["text"])
            if "uri" in props_struct:
                return RemoteAsset(props_struct["uri"])
            raise AssertionError(
                "Invalid asset encountered when unmarshalling resource property")
        if props_struct[_special_sig_key] == _special_archive_sig:
            # This is an archive. Re-hydrate this object into an Archive.
            if "assets" in props_struct:
                return AssetArchive(deserialize_property(props_struct["assets"]))
            if "path" in props_struct:
                return FileArchive(props_struct["path"])
            if "uri" in props_struct:
                return RemoteArchive(props_struct["uri"])
            raise AssertionError(
                "Invalid archive encountered when unmarshalling resource property")
        if props_struct[_special_sig_key] == _special_secret_sig:
            return wrap_rpc_secret(deserialize_property(props_struct["value"]))
        if props_struct[_special_sig_key] == _special_resource_sig:
            return deserialize_resource(props_struct, keep_unknowns)
        raise AssertionError(
            "Unrecognized signature when unmarshalling resource property")

    # Struct is duck-typed like a dictionary, so we can iterate over it in the normal ways. Note
    # that if the struct had any secret properties, we push the secretness of the object up to us
    # since we can only set secret outputs on top level properties.
    output = {}
    for k, v in list(props_struct.items()):
        # Unilaterally skip properties considered internal by the Pulumi engine.
        # These don't actually contribute to the exposed shape of the object, do
        # not need to be passed back to the engine, and often will not match the
        # expected type we are deserializing into.
        # Keep "__provider" as it's the property name used by Python dynamic providers.
        if k.startswith("__") and k != "__provider":
            continue

        value = deserialize_property(v, keep_unknowns)
        # We treat values that deserialize to "None" as if they don't exist.
        if value is not None:
            output[k] = value

    return output
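# A standalone sketch of the filtering performed in the loop above, using a plain dict in place
# of a Struct: engine-internal "__"-prefixed keys are dropped (except "__provider", which Python
# dynamic providers rely on), and values that deserialize to None are treated as absent.
# The key names other than "__provider" are made up for illustration.
raw = {
    "__defaults": [],                           # engine-internal, skipped
    "__provider": "serialized-provider-code",   # kept for dynamic providers
    "bucketName": "my-bucket",
    "policy": None,                             # dropped: deserializes to None
}
filtered = {
    k: v
    for k, v in raw.items()
    if (not k.startswith("__") or k == "__provider") and v is not None
}
assert filtered == {"__provider": "serialized-provider-code", "bucketName": "my-bucket"}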
async def resolve_outputs(res: 'Resource', serialized_props: struct_pb2.Struct,
                          outputs: struct_pb2.Struct, resolvers: Dict[str, Resolver]):

    # Produce a combined set of property states, starting with inputs and then applying
    # outputs. If the same property exists in the inputs and outputs states, the output wins.
    all_properties = {}
    for key, value in deserialize_properties(outputs).items():
        # Outputs coming from the provider are NOT translated. Do so here.
        translated_key = res.translate_output_property(key)
        translated_value = translate_output_properties(res, value)
        log.debug(f"incoming output property translated: {key} -> {translated_key}")
        log.debug(f"incoming output value translated: {value} -> {translated_value}")
        all_properties[translated_key] = translated_value

    if not settings.is_dry_run() or settings.is_legacy_apply_enabled():
        for key, value in list(serialized_props.items()):
            translated_key = res.translate_output_property(key)
            if translated_key not in all_properties:
                # input prop the engine didn't give us a final value for. Just use the value
                # passed into the resource by the user.
                all_properties[translated_key] = translate_output_properties(res, deserialize_property(value))

    for key, value in all_properties.items():
        # Skip "id" and "urn", since we handle those specially.
        if key in ["id", "urn"]:
            continue

        # Otherwise, unmarshal the value, and store it on the resource object.
        log.debug(f"looking for resolver using translated name {key}")
        resolve = resolvers.get(key)
        if resolve is None:
            # engine returned a property that was not in our initial property-map. This can happen
            # for outputs that were registered through direct calls to 'registerOutputs'. We do
            # *not* want to do anything with these returned properties. First, the component
            # resources that were calling 'registerOutputs' will have already assigned these fields
            # directly on them themselves. Second, if we were to try to assign here we would have
            # an incredibly bad race condition for two reasons:
            #
            #  1. This call to 'resolveProperties' happens asynchronously at some point far after
            #     the resource was constructed. So the user will have been able to observe the
            #     initial value up until we get to this point.
            #
            #  2. The component resource will have often assigned a value of some arbitrary type
            #     (say, a 'string'). If we overwrite this with an `Output<string>` we'll be changing
            #     the type at some non-deterministic point in the future.
            continue

        # Secrets are passed back as an object with our special signature key set to
        # _special_secret_sig; in this case we have to unwrap the object to get the actual
        # underlying value.
        is_secret = False
        if isinstance(value, dict) and _special_sig_key in value and value[_special_sig_key] == _special_secret_sig:
            is_secret = True
            value = value["value"]

        # If either we are performing a real deployment, or this is a stable property value, we
        # can propagate its final value. Otherwise, it must be undefined, since we don't know
        # if it's final.
        if not settings.is_dry_run():
            # normal 'pulumi up'. resolve the output with the value we got back
            # from the engine. That output can always run its .apply calls.
            resolve(value, True, is_secret, None)
        else:
            # We're previewing. If the engine was able to give us a reasonable value back,
            # then use it. Otherwise, inform the Output that the value isn't known.
            resolve(value, value is not None and not contains_unknowns(value), is_secret, None)

    # `all_properties` may not have contained a value for every resolver: for example, optional
    # outputs may not be present. We will resolve all of these values as `None`, and will mark
    # the value as known if we are not running a preview.
    for key, resolve in resolvers.items():
        if key not in all_properties:
            resolve(None, not settings.is_dry_run(), False, None)
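# A hypothetical resolver following the (value, is_known, is_secret, exception) calling
# convention used above; the real Resolver callables are created when the resource's output
# futures are set up, so this is only a sketch of the protocol.
from typing import Any, Optional


def make_resolver(name: str):
    def resolve(value: Any, is_known: bool, is_secret: bool, exn: Optional[Exception]) -> None:
        print(f"{name}: value={value!r} known={is_known} secret={is_secret} exn={exn}")
    return resolve


example_resolvers = {"bucket_name": make_resolver("bucket_name")}

# During a normal 'pulumi up' the engine's value is always resolved as known:
example_resolvers["bucket_name"]("my-bucket-1234", True, False, None)
# During a preview, an output the engine could not compute is resolved as unknown:
example_resolvers["bucket_name"](None, False, False, None)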
def deserialize_properties(props_struct: struct_pb2.Struct, keep_unknowns: Optional[bool] = None) -> Any:
    """
    Deserializes a protobuf `struct_pb2.Struct` into a Python dictionary containing normal
    Python types.
    """
    # Check out this link for details on what sort of types Protobuf is going to generate:
    # https://developers.google.com/protocol-buffers/docs/reference/python-generated
    #
    # We assume that we are deserializing properties that we got from a Resource RPC endpoint,
    # which has type `Struct` in our gRPC proto definition.
    if _special_sig_key in props_struct:
        from .. import FileAsset, StringAsset, RemoteAsset, AssetArchive, FileArchive, RemoteArchive  # pylint: disable=import-outside-toplevel
        if props_struct[_special_sig_key] == _special_asset_sig:
            # This is an asset. Re-hydrate this object into an Asset.
            if "path" in props_struct:
                return FileAsset(props_struct["path"])
            if "text" in props_struct:
                return StringAsset(props_struct["text"])
            if "uri" in props_struct:
                return RemoteAsset(props_struct["uri"])
            raise AssertionError(
                "Invalid asset encountered when unmarshalling resource property")
        if props_struct[_special_sig_key] == _special_archive_sig:
            # This is an archive. Re-hydrate this object into an Archive.
            if "assets" in props_struct:
                return AssetArchive(deserialize_property(props_struct["assets"]))
            if "path" in props_struct:
                return FileArchive(props_struct["path"])
            if "uri" in props_struct:
                return RemoteArchive(props_struct["uri"])
            raise AssertionError(
                "Invalid archive encountered when unmarshalling resource property")
        if props_struct[_special_sig_key] == _special_secret_sig:
            return wrap_rpc_secret(deserialize_property(props_struct["value"]))
        if props_struct[_special_sig_key] == _special_resource_sig:
            urn = props_struct["urn"]
            version = props_struct["version"]

            urn_parts = urn.split("::")
            urn_name = urn_parts[3]
            qualified_type = urn_parts[2]
            typ = qualified_type.split("$")[-1]

            typ_parts = typ.split(":")
            pkg_name = typ_parts[0]
            mod_name = typ_parts[1] if len(typ_parts) > 1 else ""
            typ_name = typ_parts[2] if len(typ_parts) > 2 else ""

            resource = None
            is_provider = pkg_name == "pulumi" and mod_name == "providers"
            if is_provider:
                resource_package = _RESOURCE_PACKAGES.get(_package_key(typ_name, version))
                if resource_package is None:
                    raise Exception(
                        f"Unable to deserialize provider {urn}, no resource package is registered for {typ_name}.")
                resource = resource_package.construct_provider(urn_name, typ, {}, urn)
            else:
                resource_module = _RESOURCE_MODULES.get(_module_key(typ_name, version))
                if resource_module is None:
                    raise Exception(
                        f"Unable to deserialize resource {urn}, no resource module is registered for {mod_name}.")
                # Assign the constructed resource so it is actually returned below.
                resource = resource_module.construct(urn_name, typ, {}, urn)

            return cast('Resource', resource)
        raise AssertionError(
            "Unrecognized signature when unmarshalling resource property")

    # Struct is duck-typed like a dictionary, so we can iterate over it in the normal ways. Note
    # that if the struct had any secret properties, we push the secretness of the object up to us
    # since we can only set secret outputs on top level properties.
    output = {}
    for k, v in list(props_struct.items()):
        # Unilaterally skip properties considered internal by the Pulumi engine.
        # These don't actually contribute to the exposed shape of the object, do
        # not need to be passed back to the engine, and often will not match the
        # expected type we are deserializing into.
        # Keep "__provider" as it's the property name used by Python dynamic providers.
        if k.startswith("__") and k != "__provider":
            continue

        value = deserialize_property(v, keep_unknowns)
        # We treat values that deserialize to "None" as if they don't exist.
        if value is not None:
            output[k] = value

    return output
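# A standalone walk-through of the URN parsing performed in the _special_resource_sig branch
# above, using a made-up example URN of the form
# "urn:pulumi:{stack}::{project}::{qualified type}::{name}".
urn = "urn:pulumi:dev::my-project::aws:s3/bucket:Bucket::my-bucket"

urn_parts = urn.split("::")
urn_name = urn_parts[3]              # "my-bucket"
qualified_type = urn_parts[2]        # "aws:s3/bucket:Bucket"
typ = qualified_type.split("$")[-1]  # last entry of a parent$child type chain

typ_parts = typ.split(":")
pkg_name = typ_parts[0]                                # "aws"
mod_name = typ_parts[1] if len(typ_parts) > 1 else ""  # "s3/bucket"
typ_name = typ_parts[2] if len(typ_parts) > 2 else ""  # "Bucket"

assert (pkg_name, mod_name, typ_name, urn_name) == ("aws", "s3/bucket", "Bucket", "my-bucket")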