def __create_json_pipeline_segment_file(self):
    """
    Creates a json file that relates to this segment; the file's segment id
    can later be passed to the init to retrieve all functions the object has
    already performed.
    """
    json_dict = dict()

    # Pipeline Segment
    json_dict["Pipeline Segment"] = dict()
    json_dict["Pipeline Segment"]["Object Type"] = self.__object_type
    json_dict["Pipeline Segment"]["Functions Performed Order"] = dict()

    function_order = 1
    for function_name, params_dict in self.__function_pipe:
        # Nest each performed function under its execution order.
        json_dict["Pipeline Segment"]["Functions Performed Order"][
            f"Function Order {function_order}"] = {
                function_name: {"Params Dict": params_dict}
            }
        function_order += 1

    json_dict["Pipeline Segment"]["Function Count"] = function_order - 1

    # Generate pipeline segment file
    dict_to_json_file(json_dict,
                      self.folder_path,
                      self.__json_file_name)
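
# For reference, a segment file produced by the method above is laid out
# roughly as follows. The object type, function name, and params shown here
# are purely illustrative placeholders, not values taken from the library:
#
# {
#     "Pipeline Segment": {
#         "Object Type": "DataCleaner",
#         "Functions Performed Order": {
#             "Function Order 1": {
#                 "remove_nulls": {"Params Dict": {"columns": ["age"]}}
#             }
#         },
#         "Function Count": 1
#     }
# }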
def reset_segment_file(self):
    """
    Empties out the segment's json file so the segment can be rebuilt from a
    clean state.
    """
    # File/Folder error checks
    if not os.path.exists(self.folder_path):
        raise PipelineSegmentError(
            "Couldn't find the pipeline segment's folder when trying to "
            "configure this object with the provided json file.")

    if not os.path.exists(self.folder_path + self.__json_file_name):
        raise PipelineSegmentError(
            f"Couldn't find the pipeline segment's file named "
            f"'{self.__json_file_name}' in the pipeline's directory when "
            f"trying to configure this object with the provided json file.")

    # Overwrite the existing segment file with an empty dict.
    dict_to_json_file({},
                      self.folder_path,
                      self.__json_file_name)
def __create_dataframe_snapshot_json_file(self, df, output_folder_path):
    """
    Creates a json file based on the dataframe's generated snapshot dict.

    Args:
        df: pd.DataFrame
            Pandas DataFrame object.

        output_folder_path: string
            Output path the json file will be written to.
    """
    output_folder_path = correct_directory_path(output_folder_path)

    meta_dict = self.__generate_dataframe_snapshot_dict(df)

    dict_to_json_file(meta_dict,
                      output_folder_path,
                      "Dataframe Snapshot")
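
# The helpers 'correct_directory_path' and 'dict_to_json_file' used above are
# presumably imported from the library's utility module. Below is a minimal
# sketch of what they might look like, inferred only from how they are called
# in this file; this is assumed behaviour, not the library's implementation:
#
#     import json
#     import os
#
#     def correct_directory_path(directory_path):
#         # Normalize a path so it always ends with a single '/'.
#         return directory_path.rstrip("/") + "/"
#
#     def dict_to_json_file(dict_obj, directory_path, filename):
#         # Serialize 'dict_obj' to '<directory_path>/<filename>.json'.
#         os.makedirs(directory_path, exist_ok=True)
#         with open(os.path.join(directory_path, filename + ".json"), "w") as f:
#             json.dump(dict_obj, f, indent=4)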
def __create_json_pipeline_file(self):
    """
    Creates a dict based on the contents of 'self.__pipeline_segment_deque'
    and converts it to a json file. This file will later be used to instruct
    the object to execute specific code.
    """
    json_dict = dict()
    segment_order = 1

    json_dict["Pipeline Name"] = self.__pipeline_name
    json_dict["Pipeline Segment Order"] = dict()

    for segment_name, segment_path_id, pipeline_segment_obj in self.__pipeline_segment_deque:
        # Record where the segment lives, what type it is, and its id.
        json_dict["Pipeline Segment Order"][segment_order] = {
            "Pipeline Segment Path": segment_path_id,
            "Pipeline Segment Type": pipeline_segment_obj.__class__.__name__,
            "Pipeline Segment Name": segment_name,
            "Pipeline Segment ID": segment_path_id.split("/")[-1].split(".")[0],
        }
        segment_order += 1

    json_dict["Pipeline Segment Count"] = segment_order - 1

    # Create a folder for all non-root json files.
    if self.__pipeline_modify_id:
        create_dir_structure(self.folder_path,
                             "/Modified Pipelines")
        dict_to_json_file(json_dict,
                          self.folder_path + "/Modified Pipelines",
                          self.__json_file_name)
    # Root json files only
    else:
        dict_to_json_file(json_dict,
                          self.folder_path,
                          self.__json_file_name)
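
# A root pipeline file written by the method above would look roughly like
# this; the pipeline name, path, and ids are hypothetical placeholders used
# only to show the shape of the output:
#
# {
#     "Pipeline Name": "Example Pipeline",
#     "Pipeline Segment Order": {
#         "1": {
#             "Pipeline Segment Path": "example/segments/abc123.json",
#             "Pipeline Segment Type": "DataPipelineSegment",
#             "Pipeline Segment Name": "Clean data",
#             "Pipeline Segment ID": "abc123"
#         }
#     },
#     "Pipeline Segment Count": 1
# }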