def generate_kfp_executable(self, pipeline_graph, pipeline_parameters, save_to_tmp=False):
    """Generate a Python executable starting from a Graph."""
    self.logger.debug("------------- Kale Start Run -------------")

    # Build the complete KFP pipeline definition from the notebook graph.
    kfp_code = generate_code.gen_kfp_code(
        nb_graph=pipeline_graph,
        nb_path=os.path.abspath(self.source_path),
        pipeline_parameters=pipeline_parameters,
        metadata=self.pipeline_metadata,
        auto_snapshot=self.auto_snapshot)

    if save_to_tmp:
        # A None target is passed through to save_pipeline — presumably it
        # then chooses a temporary location (TODO confirm in save_pipeline).
        target = None
    else:
        # Place the generated script next to the source notebook, named
        # after the pipeline.
        script_name = "{}.kale.py".format(
            self.pipeline_metadata['pipeline_name'])
        target = os.path.join(os.path.dirname(self.source_path), script_name)

    # save kfp generated code; save_pipeline returns the path it wrote to.
    return self.save_pipeline(kfp_code, target)
def run(self):
    """Convert the source notebook into a KFP pipeline and optionally deploy it.

    Steps: validate metadata, parse the notebook into a graph, detect
    pipeline parameters and variable dependencies, resolve the base docker
    image, generate the KFP code, save it, and (if requested) deploy.

    Acts as the top-level CLI boundary: any failure is logged (full
    traceback at debug level) rather than propagated to the caller.
    """
    self.logger.debug("------------- Kale Start Run -------------")
    try:
        # validate provided metadata
        self.validate_metadata()

        # convert notebook to nx graph
        pipeline_graph, pipeline_parameters_code_block = parser.parse_notebook(
            self.source_path, self.nbformat_version)

        pipeline_parameters_dict = dep_analysis.pipeline_parameters_detection(
            pipeline_parameters_code_block)

        # run static analysis over the source code; pipeline parameters are
        # excluded from the dependency symbols.
        dep_analysis.variables_dependencies_detection(
            pipeline_graph,
            ignore_symbols=set(pipeline_parameters_dict.keys()))

        # TODO: Run a static analysis over every step to check that
        #  pipeline parameters are not assigned with new values.

        # In case the user did not specify a custom docker image, use the
        # same base image of the current Notebook Server.
        if self.docker_base_image == '':
            try:
                self.docker_base_image = pod_utils.get_docker_base_image()
            except ConfigException:
                # no K8s config found: keep the empty value so the KFP
                # default image is used.  Any other exception propagates
                # to the outer handler (no redundant re-raise clause needed).
                pass

        # generate full kfp pipeline definition
        kfp_code = generate_code.gen_kfp_code(
            nb_graph=pipeline_graph,
            experiment_name=self.experiment_name,
            pipeline_name=self.pipeline_name,
            pipeline_description=self.pipeline_description,
            pipeline_parameters=pipeline_parameters_dict,
            docker_base_image=self.docker_base_image,
            volumes=self.volumes,
            deploy_pipeline=self.run_pipeline,
            working_dir=self.abs_working_dir)

        # save kfp generated code
        self.save_pipeline(kfp_code)

        # deploy pipeline to KFP instance
        if self.upload_pipeline or self.run_pipeline:
            return self.deploy_pipeline_to_kfp(self.output_path)
    except Exception as e:
        # CLI boundary: log instead of crashing; debug log carries the
        # full traceback.
        self.logger.debug(e, exc_info=True)
        self.logger.error(e)
        self.logger.error(
            "To see full traceback run Kale with --debug flag or have a look at kale.log logfile"
        )
def generate_kfp_executable(self, pipeline_graph, pipeline_parameters):
    """Generate a Python KFP script from the pipeline graph and save it.

    Returns the path of the generated ``<pipeline_name>.kale.py`` file,
    written next to the source notebook.
    """
    self.logger.debug("------------- Kale Start Run -------------")

    # generate full kfp pipeline definition
    code = generate_code.gen_kfp_code(
        nb_graph=pipeline_graph,
        pipeline_parameters=pipeline_parameters,
        metadata=self.pipeline_metadata)

    # The generated script lives beside the source notebook.
    notebook_dir = os.path.dirname(self.source_path)
    script_path = os.path.join(
        notebook_dir,
        f"{self.pipeline_metadata['pipeline_name']}.kale.py")

    # save kfp generated code
    self.save_pipeline(code, script_path)
    return script_path
def generate_kfp_executable(self, pipeline_graph, pipeline_parameters):
    """Generate a Python KFP executable next to the source notebook.

    Returns the path of the generated ``<pipeline_name>.kale.py`` script.
    """
    self.logger.debug("------------- Kale Start Run -------------")

    # generate full kfp pipeline definition
    code = generate_code.gen_kfp_code(
        nb_graph=pipeline_graph,
        nb_path=os.path.abspath(self.source_path),
        pipeline_parameters=pipeline_parameters,
        metadata=self.pipeline_metadata,
        auto_snapshot=self.auto_snapshot)

    # Name the script after the pipeline and place it beside the notebook.
    script_name = "{}.kale.py".format(self.pipeline_metadata['pipeline_name'])
    target = os.path.join(os.path.dirname(self.source_path), script_name)

    # save kfp generated code
    self.save_pipeline(code, target)
    return target
def generate_kfp_executable(self, pipeline_graph, pipeline_parameters, save_to_tmp=False): self.logger.debug("------------- Kale Start Run -------------") # generate full kfp pipeline definition kfp_code = generate_code.gen_kfp_code( nb_graph=pipeline_graph, nb_path=os.path.abspath(self.source_path), pipeline_parameters=pipeline_parameters, metadata=self.pipeline_metadata, auto_snapshot=self.auto_snapshot) if save_to_tmp: output_path = None else: notebook_dir = os.path.dirname(self.source_path) filename = "{}.kale.py".format( self.pipeline_metadata['pipeline_name']) output_path = os.path.join(notebook_dir, filename) # save kfp generated code output_path = self.save_pipeline(kfp_code, output_path) return output_path
def run(self):
    """Parse the notebook into a pipeline, generate the KFP code, save it,
    and optionally deploy it to a KFP instance."""
    # convert notebook to nx graph
    graph = parser.parse_notebook(self.source_path, self.nbformat_version)

    # run static analysis over the source code
    dep_analysis.variables_dependencies_detection(graph)

    # generate full kfp pipeline definition
    gen_args = dict(
        nb_graph=graph,
        pipeline_name=self.pipeline_name,
        pipeline_description=self.pipeline_description,
        docker_base_image=self.docker_base_image,
        mount_host_path=self.mount_host_path,
        mount_container_path=self.mount_container_path,
        deploy_pipeline=self.deploy_pipeline,
    )
    code = generate_code.gen_kfp_code(**gen_args)

    # save kfp generated code
    self.save_pipeline(code)

    # deploy pipeline to KFP instance
    if self.deploy_pipeline:
        self.deploy_pipeline_to_kfp(self.output_path)