def run(self):
    """Compile the notebook into a KFP pipeline and optionally deploy it.

    Steps: validate metadata, parse the notebook into an nx graph, detect
    pipeline parameters and inter-step variable dependencies, resolve the
    docker base image, generate the kfp pipeline code, save it, and — when
    requested — upload/run it on the KFP instance.

    Returns:
        The result of ``deploy_pipeline_to_kfp`` when upload/run was
        requested, otherwise ``None``. Any failure is logged (full
        traceback at debug level) instead of propagating.
    """
    self.logger.debug("------------- Kale Start Run -------------")
    try:
        # validate provided metadata
        self.validate_metadata()

        # convert notebook to nx graph
        pipeline_graph, pipeline_parameters_code_block = parser.parse_notebook(
            self.source_path, self.nbformat_version)
        pipeline_parameters_dict = dep_analysis.pipeline_parameters_detection(
            pipeline_parameters_code_block)

        # run static analysis over the source code; pipeline parameters are
        # excluded from the dependency detection
        dep_analysis.variables_dependencies_detection(
            pipeline_graph,
            ignore_symbols=set(pipeline_parameters_dict.keys()))

        # TODO: Run a static analysis over every step to check that pipeline
        #  parameters are not assigned with new values.

        # in case the user did not specify a custom docker image, use the
        # same base image of the current Notebook Server
        if self.docker_base_image == '':
            try:
                self.docker_base_image = pod_utils.get_docker_base_image()
            except ConfigException:
                # no K8s config found: keep the empty value so the kfp
                # default image is used downstream
                pass
            # NOTE: the original code had an additional
            # `except Exception: raise` clause here — a bare re-raise is a
            # no-op, so it has been removed; any other exception still
            # propagates to the outer handler below.

        # generate full kfp pipeline definition
        kfp_code = generate_code.gen_kfp_code(
            nb_graph=pipeline_graph,
            experiment_name=self.experiment_name,
            pipeline_name=self.pipeline_name,
            pipeline_description=self.pipeline_description,
            pipeline_parameters=pipeline_parameters_dict,
            docker_base_image=self.docker_base_image,
            volumes=self.volumes,
            deploy_pipeline=self.run_pipeline,
            working_dir=self.abs_working_dir)

        # save kfp generated code
        self.save_pipeline(kfp_code)

        # deploy pipeline to KFP instance
        if self.upload_pipeline or self.run_pipeline:
            return self.deploy_pipeline_to_kfp(self.output_path)
    except Exception as e:
        # full traceback only at debug level; a short message otherwise
        self.logger.debug(e, exc_info=True)
        self.logger.error(e)
        self.logger.error(
            "To see full traceback run Kale with --debug flag or have a look at kale.log logfile"
        )
def notebook_to_graph(self):
    """Parse ``self.notebook`` into an nx graph plus its pipeline parameters.

    Returns:
        tuple: ``(pipeline_graph, pipeline_parameters)`` where the first
        element is the nx graph of pipeline steps and the second a dict of
        the detected pipeline parameters.
    """
    # notebook -> nx graph + the code block declaring pipeline parameters
    graph, params_code = parser.parse_notebook(self.notebook)
    params = dep_analysis.pipeline_parameters_detection(params_code)

    # static analysis of variable dependencies between the steps; the
    # detected pipeline parameters are excluded from the analysis
    dep_analysis.variables_dependencies_detection(
        graph, ignore_symbols=set(params.keys()))

    # TODO: Additional Step required:
    # Run a static analysis over every step to check that pipeline
    # parameters are not assigned with new values.
    return graph, params
def run(self):
    """Convert the source notebook to KFP code, save it, optionally deploy.

    Parses the notebook, runs dependency analysis, renders the kfp
    pipeline definition, writes it out and, when ``self.deploy_pipeline``
    is set, deploys it to the KFP instance.
    """
    # notebook -> nx graph of pipeline steps
    graph = parser.parse_notebook(self.source_path, self.nbformat_version)

    # detect variable dependencies between steps
    dep_analysis.variables_dependencies_detection(graph)

    # render the complete kfp pipeline definition
    pipeline_source = generate_code.gen_kfp_code(
        nb_graph=graph,
        pipeline_name=self.pipeline_name,
        pipeline_description=self.pipeline_description,
        docker_base_image=self.docker_base_image,
        mount_host_path=self.mount_host_path,
        mount_container_path=self.mount_container_path,
        deploy_pipeline=self.deploy_pipeline)

    # persist the generated code
    self.save_pipeline(pipeline_source)

    # push to the KFP instance only when deployment was requested
    if self.deploy_pipeline:
        self.deploy_pipeline_to_kfp(self.output_path)