async def get(self, runtime_type):
    self.log.debug(f"Retrieving pipeline components for runtime type: {runtime_type}")
    processor_manager = PipelineProcessorManager.instance()
    if processor_manager.is_supported_runtime(runtime_type):
        # The endpoint path contains the shorthand version of a runtime (e.g., 'kfp',
        # 'airflow'). This case and its associated functions should eventually be removed
        # in favor of using the RuntimeProcessorType name in the request path.
        self.log.warning(
            f"Deprecation warning: when calling endpoint '{self.request.path}' "
            f"use runtime type name (e.g. 'KUBEFLOW_PIPELINES', 'APACHE_AIRFLOW') "
            f"instead of shorthand name (e.g., 'kfp', 'airflow')"
        )
        runtime_processor_type = processor_manager.get_runtime_type(runtime_type)
    elif processor_manager.is_supported_runtime_type(runtime_type):
        # The request path uses the appropriate RuntimeProcessorType name. Use this
        # to get the RuntimeProcessorType instance to pass to get_all_components
        runtime_processor_type = RuntimeProcessorType.get_instance_by_name(runtime_type)
    else:
        raise web.HTTPError(400, f"Invalid runtime type '{runtime_type}'")

    # Include generic components for all runtime types
    components: List[Component] = ComponentCache.get_generic_components()

    # Add additional runtime-type-specific components, if present
    components.extend(ComponentCache.instance().get_all_components(platform=runtime_processor_type))

    palette_json = ComponentCache.to_canvas_palette(components=components)

    self.set_status(200)
    self.set_header("Content-Type", "application/json")
    await self.finish(palette_json)
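# Illustrative only: how the two lookup branches above are expected to resolve the path
# segment into a RuntimeProcessorType. The enum member names mirror the deprecation
# warning text and are assumptions, not confirmed by the source.
#   Shorthand (deprecated):  'kfp'            -> RuntimeProcessorType.KUBEFLOW_PIPELINES
#   Full type name:          'APACHE_AIRFLOW' -> RuntimeProcessorType.APACHE_AIRFLOW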
async def get(self, runtime_type):
    self.log.debug(f"Retrieving pipeline components for runtime type: {runtime_type}")
    runtime_processor_type = get_runtime_processor_type(runtime_type, self.log, self.request.path)
    if not runtime_processor_type:
        raise web.HTTPError(400, f"Invalid runtime type '{runtime_type}'")

    # Include generic components for all runtime types
    components: List[Component] = ComponentCache.get_generic_components()

    # Add additional runtime-type-specific components, if present
    components.extend(ComponentCache.instance().get_all_components(platform=runtime_processor_type))

    palette_json = ComponentCache.to_canvas_palette(components=components)

    self.set_status(200)
    self.set_header("Content-Type", "application/json")
    await self.finish(palette_json)
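# A minimal sketch of the get_runtime_processor_type helper called above, assuming it
# consolidates the shorthand/full-name resolution from the earlier handler into one
# reusable function that returns None for unsupported types. The signature, return
# type, and module placement are assumptions, not confirmed by the source.
from typing import Optional


def get_runtime_processor_type(runtime_type: str, log, request_path: str) -> Optional[RuntimeProcessorType]:
    processor_manager = PipelineProcessorManager.instance()
    if processor_manager.is_supported_runtime(runtime_type):
        # Shorthand runtime names (e.g. 'kfp', 'airflow') are deprecated in favor of
        # RuntimeProcessorType names in the request path.
        log.warning(
            f"Deprecation warning: when calling endpoint '{request_path}' "
            f"use runtime type name (e.g. 'KUBEFLOW_PIPELINES', 'APACHE_AIRFLOW') "
            f"instead of shorthand name (e.g., 'kfp', 'airflow')"
        )
        return processor_manager.get_runtime_type(runtime_type)
    if processor_manager.is_supported_runtime_type(runtime_type):
        # The request path already uses the RuntimeProcessorType name
        return RuntimeProcessorType.get_instance_by_name(runtime_type)
    return None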
async def _validate_custom_component_node_properties(
    self, node: Node, response: ValidationResponse, pipeline_definition: PipelineDefinition, pipeline_runtime: str
):
    """
    Validates the properties of the custom component node
    :param node: the node to be validated
    :param response: the validation response object to attach any error messages
    :param pipeline_definition: the pipeline definition containing the node
    :param pipeline_runtime: the pipeline runtime selected
    :return:
    """
    component_list = await PipelineProcessorManager.instance().get_components(pipeline_runtime)
    components = ComponentCache.to_canvas_palette(component_list)

    # Full dict of properties for the operation, e.g. current params, optionals, etc.
    component_property_dict = await self._get_component_properties(pipeline_runtime, components, node.op)

    # List of just the current parameters for the component
    current_parameter_defaults_list = list(
        map(lambda x: str(x).replace("elyra_", ""), component_property_dict["current_parameters"].keys())
    )

    # Remove the non-component_parameter Jinja-templated values we do not check against
    current_parameter_defaults_list.remove("component_source")
    current_parameter_defaults_list.remove("label")

    for default_parameter in current_parameter_defaults_list:
        node_param = node.get_component_parameter(default_parameter)
        if self._is_required_property(component_property_dict, default_parameter):
            if not node_param:
                response.add_message(
                    severity=ValidationSeverity.Error,
                    message_type="invalidNodeProperty",
                    message="Node is missing required property.",
                    data={"nodeID": node.id, "nodeName": node.label, "propertyName": default_parameter},
                )
            elif self._get_component_type(component_property_dict, default_parameter) == "inputpath":
                # Any component property with type `InputPath` will be a dictionary of two keys:
                # "value": the node ID of the parent node containing the output
                # "option": the name of the key (which is an output) of the above referenced node
                if (
                    not isinstance(node_param, dict)
                    or len(node_param) != 2
                    or set(node_param.keys()) != {"value", "option"}
                ):
                    response.add_message(
                        severity=ValidationSeverity.Error,
                        message_type="invalidNodeProperty",
                        message="Node has malformed `InputPath` parameter structure",
                        data={"nodeID": node.id, "nodeName": node.label},
                    )
                node_ids = list(x.get("node_id_ref", None) for x in node.component_links)
                parent_list = self._get_parent_id_list(pipeline_definition, node_ids, [])
                node_param_value = node_param.get("value")
                if node_param_value not in parent_list:
                    response.add_message(
                        severity=ValidationSeverity.Error,
                        message_type="invalidNodeProperty",
                        message="Node contains an invalid inputpath reference. Please "
                        "check your node-to-node connections",
                        data={"nodeID": node.id, "nodeName": node.label},
                    )
            elif isinstance(node_param, dict) and node_param.get("activeControl") == "NestedEnumControl":
                if not node_param.get("NestedEnumControl"):
                    response.add_message(
                        severity=ValidationSeverity.Error,
                        message_type="invalidNodeProperty",
                        message="Node contains an invalid reference to a node output. Please "
                        "check the node properties are configured properly",
                        data={"nodeID": node.id, "nodeName": node.label},
                    )
                else:
                    # TODO: Update this hardcoded check for xcom_push. This parameter is specific to a runtime
                    # (Airflow), i.e. abstraction for byo validation?
node_param_value = node_param["NestedEnumControl"].get("value") upstream_node = pipeline_definition.get_node(node_param_value) xcom_param = upstream_node.get_component_parameter("xcom_push") if xcom_param: xcom_value = xcom_param.get("BooleanControl") if not xcom_value: response.add_message( severity=ValidationSeverity.Error, message_type="invalidNodeProperty", message="Node contains an invalid input reference. The parent " "node does not have the xcom_push property enabled", data={ "nodeID": node.id, "nodeName": node.label, "parentNodeID": upstream_node.label, }, )