Example #1
File: pipeline.py Project: zwj0110/spark
    def load(metadata, sc, path):
        """
        Load metadata and stages for a :py:class:`Pipeline` or :py:class:`PipelineModel`

        :return: (UID, list of stages)
        """
        stagesDir = os.path.join(path, "stages")
        stageUids = metadata['paramMap']['stageUids']
        stages = []
        for index, stageUid in enumerate(stageUids):
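            # Resolve this stage's subdirectory under stages/ and rebuild
            # the stage instance from its saved params.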
            stagePath = \
                PipelineSharedReadWrite.getStagePath(stageUid, index, len(stageUids), stagesDir)
            stage = DefaultParamsReader.loadParamsInstance(stagePath, sc)
            stages.append(stage)
        return (metadata['uid'], stages)
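This helper lives on `PipelineSharedReadWrite`; callers first read the pipeline-level metadata JSON and then hand it in. A minimal sketch of that call sequence, assuming a running SparkContext and a directory previously written by `Pipeline.save()` (the path below is a placeholder):

    from pyspark import SparkContext
    from pyspark.ml.util import DefaultParamsReader
    from pyspark.ml.pipeline import PipelineSharedReadWrite

    sc = SparkContext.getOrCreate()
    path = "/tmp/saved_pipeline"  # placeholder: a directory produced by Pipeline.save()

    # Read the pipeline-level metadata JSON, then rebuild each stage from
    # its subdirectory under <path>/stages.
    metadata = DefaultParamsReader.loadMetadata(path, sc)
    uid, stages = PipelineSharedReadWrite.load(metadata, sc, path)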
Example #2
File: pipeline.py Project: yliou/spark
    def load(metadata: Dict[str, Any], sc: SparkContext,
             path: str) -> Tuple[str, List["PipelineStage"]]:
        """
        Load metadata and stages for a :py:class:`Pipeline` or :py:class:`PipelineModel`

        Returns
        -------
        tuple
            (UID, list of stages)
        """
        stagesDir = os.path.join(path, "stages")
        stageUids = metadata["paramMap"]["stageUids"]
        stages = []
        for index, stageUid in enumerate(stageUids):
            stagePath = PipelineSharedReadWrite.getStagePath(
                stageUid, index, len(stageUids), stagesDir)
            stage: "PipelineStage" = DefaultParamsReader.loadParamsInstance(
                stagePath, sc)
            stages.append(stage)
        return (metadata["uid"], stages)