def save_data_in_pkl(data, output_name):
    """Pickle *data* to ``<output_name>.pkl`` in the sandbox directory.

    Any existing file with the same name is overwritten.

    Args:
        data: Any picklable object.
        output_name: Base file name, without the ``.pkl`` extension.
    """
    file_path = get_sandbox_path(output_name + ".pkl")
    # 'wb' truncates an existing file, which is exactly the documented
    # overwrite behaviour; the original 'wb+' read capability was never used.
    with open(file_path, 'wb') as output_file:
        pickle.dump(data, output_file, pickle.HIGHEST_PROTOCOL)
def load_data_from_pkl(filename):
    """Load data previously saved with ``save_data_in_pkl``.

    When *filename* is an int, it is treated as a set number and the
    module-level ``data_sets`` cache is consulted first, so repeated
    lookups of an already-loaded set cost nothing extra.

    Args:
        filename: Base file name (without ``.pkl``), or an int index
            into the module-level ``data_sets`` cache.

    Returns:
        The unpickled object, or the cached entry from ``data_sets``.
    """
    if isinstance(filename, int):
        cached = data_sets[filename]
        # A tuple entry marks a set that has not been materialised yet —
        # presumably (path, metadata); TODO confirm against the writer of
        # data_sets. Fall back to loading the file by its conventional name.
        if isinstance(cached, tuple):
            return load_data_from_pkl("set_%d" % filename)
        return cached
    file_path = get_sandbox_path(filename + ".pkl")
    # NOTE(review): pickle.load executes arbitrary code from the file —
    # only ever point this at files the application itself wrote.
    with open(file_path, 'rb') as input_file:
        return pickle.load(input_file)