def test_generator_tif_creation(tmp_path):
    """Build a SingleTifGenerator from a saved json config and verify its length.

    We are reusing the data generator for training here.
    """
    params = {
        "type": "generator",
        "name": "SingleTifGenerator",
        "pre_post_frame": 30,
        "pre_post_omission": 0,
        "steps_per_epoch": -1,
        "train_path": os.path.join(
            pathlib.Path(__file__).parent.absolute(),
            "..",
            "sample_data",
            "ophys_tiny_761605196.tif",
        ),
        "batch_size": 5,
        "start_frame": 0,
        "end_frame": 99,
        "randomize": 0,
    }

    # Round-trip the parameters through json, as the real pipeline does.
    json_path = os.path.join(tmp_path, "generator.json")
    JsonSaver(params).save_json(json_path)

    loader = ClassLoader(json_path)
    data_generator = loader.find_and_build()(json_path)

    # 100 frames at batch_size 5, minus pre/post padding -> 8 batches.
    assert len(data_generator) == 8
def test_generator_ephys_creation(tmp_path):
    """Build an EphysGenerator from a saved json config and verify its length."""
    params = {
        "type": "generator",
        "name": "EphysGenerator",
        "pre_post_frame": 30,
        "pre_post_omission": 1,
        "steps_per_epoch": -1,
        "train_path": os.path.join(
            pathlib.Path(__file__).parent.absolute(),
            "..",
            "sample_data",
            "ephys_tiny_continuous.dat2",
        ),
        "batch_size": 10,
        "start_frame": 0,
        "end_frame": -1,
        "randomize": 0,
    }

    # Round-trip the parameters through json, as the real pipeline does.
    json_path = os.path.join(tmp_path, "generator.json")
    JsonSaver(params).save_json(json_path)

    loader = ClassLoader(json_path)
    data_generator = loader.find_and_build()(json_path)

    # end_frame == -1 means "use the whole recording".
    assert len(data_generator) == 4996
def _get_ephys_model(jobdir, generator_params, inference_params):
    """Serialize both param dicts to json in *jobdir*, then build and return
    the inference model wired to a freshly built data generator.
    """
    gen_json = os.path.join(jobdir, "generator.json")
    JsonSaver(generator_params).save_json(gen_json)

    # "inferrence" spelling matches the collection's naming convention.
    infer_json = os.path.join(jobdir, "inferrence.json")
    JsonSaver(inference_params).save_json(infer_json)

    data_generator = ClassLoader(gen_json).find_and_build()(gen_json)
    model = ClassLoader(infer_json).find_and_build()(infer_json, data_generator)
    return model
def run(self):
    """Run denoising inference.

    Persists the run's json configs next to the output file, rebuilds the
    generator and inference objects from those configs, executes inference,
    and — for HDF5 inputs only — rewrites the output movie with a
    normalized uint16 range.
    """
    self.logger.name = type(self).__name__
    outdir = Path(self.args['inference_params']['output_file']).parent
    if self.args["output_full_args"]:
        # Record the fully-resolved argument set alongside the output.
        full_args_path = outdir / "inference_full_args.json"
        with open(full_args_path, "w") as f:
            json.dump(self.args, f, indent=2)
        self.logger.info(f"wrote {full_args_path}")
    uid = self.args['run_uid']

    # save the json parameters to 2 different files
    inference_json_path = outdir / f"{uid}_inference.json"
    with open(inference_json_path, "w") as f:
        json.dump(self.args['inference_params'], f, indent=2)
    self.logger.info(f"wrote {inference_json_path}")
    generator_json_path = outdir / f"{uid}_generator.json"
    with open(generator_json_path, "w") as f:
        json.dump(self.args['generator_params'], f, indent=2)
    self.logger.info(f"wrote {generator_json_path}")

    # Rebuild the generator and inference objects from the json files just
    # written, so the run is reproducible from those files alone.
    generator_obj = ClassLoader(generator_json_path)
    data_generator = generator_obj.find_and_build()(generator_json_path)
    inferrence_obj = ClassLoader(inference_json_path)
    inferrence_class = inferrence_obj.find_and_build()(
        inference_json_path, data_generator)
    self.logger.info("created objects for inference")
    inferrence_class.run()

    # patch up the output movie
    # This code below will go within the inference library as pre/post
    # processings modules. Adding temporary fix to remove for non-h5 files
    # so that the CLI works with tiff, dat, ... files.
    if '.h5' in self.args["generator_params"]["train_path"]:
        self.logger.info("fixing up the range and shape of the result")
        data = normalize_uint16_output(
            Path(self.args["generator_params"]["train_path"]),
            Path(self.args["inference_params"]["output_file"]))
        # Opening with "w" deliberately overwrites the raw inference output.
        with h5py.File(
                self.args["inference_params"]["output_file"], "w") as f:
            f.create_dataset("data", data=data)
        self.logger.info(
            f"wrote {self.args['inference_params']['output_file']}")
# Persist every component's parameters to json in the job directory; these
# files are both a record of the run and the actual inputs used to rebuild
# the objects below.
# NOTE(review): `path_training`, `jobdir` and the *_param dicts are defined
# earlier in this script, outside this excerpt.
json_obj = JsonSaver(training_param)
json_obj.save_json(path_training)

path_generator = os.path.join(jobdir, "generator.json")
json_obj = JsonSaver(generator_param)
json_obj.save_json(path_generator)

path_test_generator = os.path.join(jobdir, "test_generator.json")
json_obj = JsonSaver(generator_test_param)
json_obj.save_json(path_test_generator)

path_network = os.path.join(jobdir, "network.json")
json_obj = JsonSaver(network_param)
json_obj.save_json(path_network)

# Look up each class in the collection from its json config.
generator_obj = ClassLoader(path_generator)
generator_test_obj = ClassLoader(path_test_generator)
network_obj = ClassLoader(path_network)
trainer_obj = ClassLoader(path_training)

# Build the train/test generators and the network callback, then train.
train_generator = generator_obj.find_and_build()(path_generator)
test_generator = generator_test_obj.find_and_build()(path_test_generator)
network_callback = network_obj.find_and_build()(path_network)

training_class = trainer_obj.find_and_build()(train_generator,
                                              test_generator,
                                              network_callback,
                                              path_training)

training_class.run()
# Configure and run fMRI denoising inference for one input file.
# NOTE(review): `inferrence_param`, `generator_param`, `path_output` and
# `indiv_inferrence_file` are defined earlier in this script, outside this
# excerpt.
inferrence_param["name"] = "fmri_inferrence"
# NOTE(review): hard-coded developer-machine path; update before reuse.
inferrence_param[
    "model_path"] = "/Users/jeromel/Documents/Work documents/Allen Institute/Projects/Deep2P/fMRI/trained_fmri_models/fmri_volume_dense_denoiser_mean_absolute_error_2020_08_25_23_54_2020_08_25_23_54/2020_08_25_23_54_fmri_volume_dense_denoiser_mean_absolute_error_2020_08_25_23_54_model.h5"
inferrence_param["output_file"] = os.path.join(
    path_output, "denoised_" + indiv_inferrence_file)

jobdir = "/Users/jeromel/Documents/Work documents/Allen Institute/Projects/Deep2P/fMRI/studyimagenet/denoised"

# Bug fix: the original bare `except:` swallowed every error (permission
# problems, bad parent path, even KeyboardInterrupt). Catch only the one
# expected, benign case.
try:
    os.mkdir(jobdir)
except FileExistsError:
    print("folder already exists")

# Persist both configs to json, then rebuild the objects from those files.
path_generator = os.path.join(jobdir, "generator.json")
json_obj = JsonSaver(generator_param)
json_obj.save_json(path_generator)

path_infer = os.path.join(jobdir, "inferrence.json")
json_obj = JsonSaver(inferrence_param)
json_obj.save_json(path_infer)

generator_obj = ClassLoader(path_generator)
data_generator = generator_obj.find_and_build()(path_generator)

inferrence_obj = ClassLoader(path_infer)
inferrence_class = inferrence_obj.find_and_build()(path_infer,
                                                   data_generator)

inferrence_class.run()
def main(argv):
    """CLI entry point: denoise an ophys movie.

    Parses command-line flags, writes generator/inference json configs next
    to the output file, runs inference, then drops an empty ``.done`` HDF5
    marker file so callers can detect completion.

    Parameters
    ----------
    argv : list of str
        Raw command-line arguments (``sys.argv[1:]``).
    """
    opts, args = getopt.getopt(
        argv,
        [],
        [
            "movie_path=",
            "frame_start=",
            "frame_end=",
            "output_file=",
            "model_file=",
            "batch_size=",
            "pre_frame=",
            "post_frame=",
            "model_norm=",
            "save_raw=",
        ],
    )

    # default
    save_raw = False

    for opt, arg in opts:
        if opt == "--movie_path":
            movie_path = arg
        if opt == "--frame_start":
            # int() replaces np.int, which was removed in NumPy 1.24.
            input_frames_start = int(arg)
        if opt == "--frame_end":
            input_frames_end = int(arg)
        if opt == "--output_file":
            output_file = arg
        if opt == "--model_file":
            model_path = arg
        if opt == "--batch_size":
            batch_size = int(arg)
        if opt == "--pre_frame":
            pre_frame = int(arg)
        if opt == "--post_frame":
            post_frame = int(arg)
        if opt == "--save_raw":
            # Bug fix: bool(arg) treated any non-empty string -- including
            # "False" and "0" -- as True. Parse the text explicitly.
            save_raw = arg.strip().lower() in ("1", "true", "yes")

    generator_param = {}
    inferrence_param = {}

    generator_param["type"] = "generator"
    generator_param["name"] = "OphysGenerator"
    generator_param["pre_frame"] = pre_frame
    generator_param["post_frame"] = post_frame
    # This is meant to allow compatibility with a generator also used in
    # training
    generator_param["steps_per_epoch"] = 100
    generator_param["batch_size"] = batch_size
    generator_param["start_frame"] = input_frames_start
    generator_param["end_frame"] = input_frames_end
    generator_param["movie_path"] = movie_path
    generator_param["randomize"] = 0

    inferrence_param["type"] = "inferrence"
    inferrence_param["name"] = "core_inferrence"
    inferrence_param["model_path"] = model_path
    inferrence_param["output_file"] = output_file
    inferrence_param["save_raw"] = save_raw

    # The original wrapped the steps below in a single-pass
    # `while NotDone:` loop that never repeated; it has been removed.
    path_generator = output_file + ".generator.json"
    json_obj = JsonSaver(generator_param)
    json_obj.save_json(path_generator)

    path_infer = output_file + ".inferrence.json"
    json_obj = JsonSaver(inferrence_param)
    json_obj.save_json(path_infer)

    generator_obj = ClassLoader(path_generator)
    data_generator = generator_obj.find_and_build()(path_generator)

    inferrence_obj = ClassLoader(path_infer)
    inferrence_class = inferrence_obj.find_and_build()(path_infer,
                                                       data_generator)
    inferrence_class.run()

    # to notify process is finished
    finish_file = h5py.File(output_file + ".done", "w")
    finish_file.close()
# Here we create all json files that are fed to the training. This is used for recording purposes as well as input to the # training process path_training = os.path.join(jobdir, "training.json") json_obj = JsonSaver(training_param) json_obj.save_json(path_training) path_generator = os.path.join(jobdir, "generator.json") json_obj = JsonSaver(generator_param) json_obj.save_json(path_generator) path_test_generator = os.path.join(jobdir, "test_generator.json") json_obj = JsonSaver(generator_test_param) json_obj.save_json(path_test_generator) # We find the generator obj in the collection using the json file generator_obj = ClassLoader(path_generator) generator_test_obj = ClassLoader(path_test_generator) # We find the training obj in the collection using the json file trainer_obj = ClassLoader(path_training) # We build the generators object. This will, among other things, calculate normalizing parameters. train_generator = generator_obj.find_and_build()(path_generator) test_generator = generator_test_obj.find_and_build()(path_test_generator) path_to_original_model = r"/Users/jeromel/test/transfer_mean_absolute_error_2020_11_12_18_05_2020_11_12_18_05/2020_11_12_18_05_transfer_mean_absolute_error_2020_11_12_18_05_model.h5" # We build the training object. training_class = trainer_obj.find_and_build()(train_generator, test_generator, path_to_original_model, path_training)
def test_ephys_training(tmp_path):
    """End-to-end smoke test of ephys training.

    Builds train/test generators, a network and a trainer entirely from
    json configs written under *tmp_path*, runs a tiny 2-step training,
    and checks the final validation loss is in a sane range.
    """
    # Initialize meta-parameters objects
    training_param = {}
    generator_param = {}
    network_param = {}
    generator_test_param = {}

    steps_per_epoch = 2

    generator_test_param["type"] = "generator"  # type of collection
    generator_test_param["name"] = "EphysGenerator"
    generator_test_param[
        "pre_post_frame"
    ] = 30  # Number of frame provided before and after the predicted frame
    generator_test_param["train_path"] = os.path.join(
        pathlib.Path(__file__).parent.absolute(),
        "..",
        "sample_data",
        "ephys_tiny_continuous.dat2",
    )
    generator_test_param["batch_size"] = 10
    generator_test_param["start_frame"] = 0
    generator_test_param["end_frame"] = 30
    generator_test_param[
        "pre_post_omission"
    ] = 1  # Number of frame omitted before and after the predicted frame
    generator_test_param[
        "steps_per_epoch"
    ] = -1

    # Training generator: same data, different (later) frame window.
    generator_param["type"] = "generator"
    generator_param["steps_per_epoch"] = steps_per_epoch
    generator_param["name"] = "EphysGenerator"
    generator_param["pre_post_frame"] = 30
    generator_param["train_path"] = os.path.join(
        pathlib.Path(__file__).parent.absolute(),
        "..",
        "sample_data",
        "ephys_tiny_continuous.dat2",
    )
    generator_param["batch_size"] = 10
    generator_param["start_frame"] = 2000
    generator_param["end_frame"] = 2030
    generator_param["pre_post_omission"] = 1

    # Those are parameters used for the network topology
    network_param["type"] = "network"
    network_param[
        "name"
    ] = "unet_single_ephys_1024"  # Name of network topology in the collection

    # Those are parameters used for the training process
    training_param["type"] = "trainer"
    training_param["name"] = "core_trainer"
    training_param["run_uid"] = 'tmp'
    training_param["batch_size"] = generator_test_param["batch_size"]
    training_param["steps_per_epoch"] = steps_per_epoch
    training_param[
        "period_save"
    ] = 25
    training_param["nb_gpus"] = 0  # CPU-only so the test runs anywhere
    training_param["apply_learning_decay"] = 0
    training_param[
        "nb_times_through_data"
    ] = 1
    training_param["learning_rate"] = 0.0001
    training_param["pre_post_frame"] = generator_test_param["pre_post_frame"]
    training_param["loss"] = "mean_absolute_error"
    training_param[
        "nb_workers"
    ] = 1
    training_param["model_string"] = (
        network_param["name"] + "-" + training_param["loss"]
    )

    jobdir = tmp_path
    training_param["output_dir"] = os.fspath(jobdir)

    # Write every component's config to json, as the real pipeline does.
    path_training = os.path.join(jobdir, "training.json")
    json_obj = JsonSaver(training_param)
    print(path_training)
    json_obj.save_json(path_training)

    path_generator = os.path.join(jobdir, "generator.json")
    json_obj = JsonSaver(generator_param)
    json_obj.save_json(path_generator)

    path_test_generator = os.path.join(jobdir, "test-generator.json")
    json_obj = JsonSaver(generator_test_param)
    json_obj.save_json(path_test_generator)

    path_network = os.path.join(jobdir, "network.json")
    json_obj = JsonSaver(network_param)
    json_obj.save_json(path_network)

    # We find the generator obj in the collection using the json file
    generator_obj = ClassLoader(path_generator)
    generator_test_obj = ClassLoader(path_test_generator)

    # We find the network obj in the collection using the json file
    network_obj = ClassLoader(path_network)

    # We find the training obj in the collection using the json file
    trainer_obj = ClassLoader(path_training)

    train_generator = generator_obj.find_and_build()(path_generator)
    test_generator = generator_test_obj.find_and_build()(path_test_generator)
    network_callback = network_obj.find_and_build()(path_network)

    training_class = trainer_obj.find_and_build()(
        train_generator, test_generator, network_callback, path_training
    )

    training_class.run()

    # Finalize and save output of the training.
    training_class.finalize()

    # Validation is a bit random due to initilization. We check that you get
    # reasonable number
    assert training_class.model_train.history["val_loss"][-1] < 1
def main(argv):
    """CLI entry point: denoise movies described by a MovieJSON config.

    Parses command-line flags, writes generator/inference json configs next
    to the output file, runs inference, then drops an empty ``.done`` HDF5
    marker file so callers can detect completion.
    """
    opts, args = getopt.getopt(
        argv,
        [],
        [
            "json_path=",
            "output_file=",
            "model_file=",
            "batch_size=",
            "pre_frame=",
            "post_frame=",
        ],
    )

    for opt, arg in opts:
        if opt == "--json_path":
            json_path = arg
        if opt == "--output_file":
            output_file = arg
        if opt == "--model_file":
            model_path = arg
        if opt == "--batch_size":
            batch_size = int(arg)
        if opt == "--pre_frame":
            pre_frame = int(arg)
        if opt == "--post_frame":
            post_frame = int(arg)

    generator_param = {
        "type": "generator",
        "name": "MovieJSONGenerator",
        "pre_frame": pre_frame,
        "post_frame": post_frame,
        "batch_size": batch_size,
        "train_path": json_path,
        # This parameter is not used in this context but is needed
        "steps_per_epoch": 10,
    }

    inferrence_param = {
        "type": "inferrence",
        "name": "core_inferrence",
        "model_path": model_path,
        "output_file": output_file,
        "save_raw": True,
        "rescale": False,
    }

    # Persist both configs beside the output, then rebuild from the files.
    path_generator = output_file + ".generator.json"
    JsonSaver(generator_param).save_json(path_generator)

    path_infer = output_file + ".inferrence.json"
    JsonSaver(inferrence_param).save_json(path_infer)

    data_generator = ClassLoader(path_generator).find_and_build()(
        path_generator)
    inferrence_class = ClassLoader(path_infer).find_and_build()(
        path_infer, data_generator)
    inferrence_class.run()

    # to notify process is finished
    finish_file = h5py.File(output_file + ".done", "w")
    finish_file.close()
# Write the training config, then build one generator per entry of
# generator_param_list, sharing the sampled coordinate lists so every
# generator draws the same random sample points.
# NOTE(review): `jobdir`, `training_param`, `generator_param_list` and
# `generator_test_param` are defined earlier in this script, outside this
# excerpt.
path_training = os.path.join(jobdir, "training.json")
json_obj = JsonSaver(training_param)
json_obj.save_json(path_training)

list_train_generator = []
for local_index, indiv_generator in enumerate(generator_param_list):
    # Only the first generator computes its own sample list.
    if local_index == 0:
        indiv_generator["initialize_list"] = 1
    else:
        indiv_generator["initialize_list"] = 0

    path_generator = os.path.join(jobdir,
                                  "generator" + str(local_index) + ".json")
    json_obj = JsonSaver(indiv_generator)
    json_obj.save_json(path_generator)
    generator_obj = ClassLoader(path_generator)
    train_generator = generator_obj.find_and_build()(path_generator)

    # we don't need to set a random set of points for all 100 or so
    if local_index == 0:
        keep_generator = train_generator
    else:
        # Reuse the first generator's sampled coordinates.
        train_generator.x_list = keep_generator.x_list
        train_generator.y_list = keep_generator.y_list
        train_generator.z_list = keep_generator.z_list
        train_generator.t_list = keep_generator.t_list

    list_train_generator.append(train_generator)

path_test_generator = os.path.join(jobdir, "test_generator.json")
json_obj = JsonSaver(generator_test_param)
os.mkdir(jobdir, 0o775) except: print("folder already exists") path_training = os.path.join(jobdir, "training.json") json_obj = JsonSaver(training_param) json_obj.save_json(path_training) list_train_generator = [] for local_index, indiv_generator in enumerate(generator_param_list): path_generator = os.path.join(jobdir, "generator" + str(local_index) + ".json") json_obj = JsonSaver(indiv_generator) json_obj.save_json(path_generator) generator_obj = ClassLoader(path_generator) train_generator = generator_obj.find_and_build()(path_generator) list_train_generator.append(train_generator) path_test_generator = os.path.join(jobdir, "test_generator.json") json_obj = JsonSaver(generator_test_param) json_obj.save_json(path_test_generator) #path_network = os.path.join(jobdir, "network.json") #json_obj = JsonSaver(network_param) #json_obj.save_json(path_network) generator_obj = ClassLoader(path_generator) generator_test_obj = ClassLoader(path_test_generator)
def main(argv):
    """CLI entry point: transfer-train an ephys denoising model.

    Parses command-line flags (with sensible defaults), builds train and
    validation generators plus a transfer trainer from json configs written
    under ``output_path``, then runs and finalizes training.

    Parameters
    ----------
    argv : list of str
        Raw command-line arguments (``sys.argv[1:]``).
    """
    opts, args = getopt.getopt(
        argv,
        [],
        [
            "movie_path=",
            "train_frame_start=",
            "train_frame_end=",
            "train_total_samples=",
            "val_frame_start=",
            "val_frame_end=",
            "val_total_samples=",
            "output_path=",
            "model_file=",
            "batch_size=",
            "pre_post_frame=",
            "pre_post_omission=",
            "loss=",
        ],
    )

    # default
    train_frame_start = 20000
    train_frame_end = -1
    train_total_samples = 10000000
    val_frame_start = 0
    val_frame_end = 19999
    val_total_samples = -1
    batch_size = 100
    pre_post_frame = 30
    pre_post_omission = 1
    loss = 'mean_squared_error'

    # int() replaces np.int, which was removed in NumPy 1.24. The original
    # also had the "--batch_size" branch duplicated; one copy is enough.
    for opt, arg in opts:
        if opt == "--movie_path":
            movie_path = arg
        if opt == "--train_frame_start":
            train_frame_start = int(arg)
        if opt == "--train_frame_end":
            train_frame_end = int(arg)
        if opt == "--train_total_samples":
            train_total_samples = int(arg)
        if opt == "--val_frame_start":
            val_frame_start = int(arg)
        if opt == "--val_frame_end":
            val_frame_end = int(arg)
        if opt == "--val_total_samples":
            val_total_samples = int(arg)
        if opt == "--batch_size":
            batch_size = int(arg)
        if opt == "--output_path":
            output_path = arg
        if opt == "--model_file":
            model_file = arg
        if opt == "--pre_post_frame":
            pre_post_frame = int(arg)
        if opt == "--pre_post_omission":
            pre_post_omission = int(arg)
        if opt == "--loss":
            loss = arg

    now = datetime.datetime.now()
    run_uid = now.strftime("%Y_%m_%d_%H_%M")

    training_param = {}
    generator_test_param = {}
    generator_param = {}

    generator_param["type"] = "generator"
    generator_param["name"] = "EphysGenerator"
    generator_param["pre_post_frame"] = pre_post_frame
    generator_param["train_path"] = movie_path
    generator_param["batch_size"] = batch_size  # 100
    generator_param["start_frame"] = train_frame_start  # 20000
    generator_param["end_frame"] = train_frame_end  # -1
    generator_param["pre_post_omission"] = pre_post_omission  # 1
    generator_param["randomize"] = 1
    generator_param["steps_per_epoch"] = 100
    generator_param["total_samples"] = train_total_samples  # 10000000

    # Validation generator: same movie, earlier frame window.
    generator_test_param["type"] = "generator"
    generator_test_param["name"] = "EphysGenerator"
    generator_test_param["pre_post_frame"] = pre_post_frame
    generator_test_param["train_path"] = movie_path
    generator_test_param["batch_size"] = batch_size
    generator_test_param["start_frame"] = val_frame_start  # 0
    generator_test_param["end_frame"] = val_frame_end  # 19999
    generator_test_param["pre_post_omission"] = pre_post_omission
    generator_test_param["randomize"] = 1
    generator_test_param["steps_per_epoch"] = -1
    generator_test_param["total_samples"] = val_total_samples  # -1

    training_param["type"] = "trainer"
    training_param["name"] = "transfer_trainer"
    training_param["model_path"] = model_file
    training_param["run_uid"] = run_uid
    training_param["batch_size"] = generator_test_param["batch_size"]
    training_param["steps_per_epoch"] = generator_param["steps_per_epoch"]
    training_param["period_save"] = 25
    training_param["nb_gpus"] = 1
    training_param["nb_times_through_data"] = 1
    training_param["learning_rate"] = 0.0005
    training_param["apply_learning_decay"] = 1
    training_param["initial_learning_rate"] = 0.0005
    training_param["epochs_drop"] = 300
    training_param["loss"] = loss
    training_param["model_string"] = 'transfer_train_' + \
        "_" + training_param["loss"]
    training_param["output_dir"] = output_path

    try:
        os.mkdir(output_path)
    except Exception:
        print("folder already exists")

    # Persist every component's config to json, then rebuild from the files.
    path_training = os.path.join(output_path, "training.json")
    json_obj = JsonSaver(training_param)
    json_obj.save_json(path_training)

    path_generator = os.path.join(output_path, "generator.json")
    json_obj = JsonSaver(generator_param)
    json_obj.save_json(path_generator)

    generator_obj = ClassLoader(path_generator)
    train_generator = generator_obj.find_and_build()(path_generator)

    path_test_generator = os.path.join(output_path, "test_generator.json")
    json_obj = JsonSaver(generator_test_param)
    json_obj.save_json(path_test_generator)
    generator_test_obj = ClassLoader(path_test_generator)

    trainer_obj = ClassLoader(path_training)

    test_generator = generator_test_obj.find_and_build()(path_test_generator)

    training_class = trainer_obj.find_and_build()(train_generator,
                                                  test_generator,
                                                  path_training)

    training_class.run()

    training_class.finalize()
def main(argv):
    """CLI entry point: denoise an ephys movie with a trained model.

    Parses command-line flags, writes generator/inference json configs next
    to the output file, runs inference, then drops an empty ``.done`` HDF5
    marker file so callers can detect completion.

    Parameters
    ----------
    argv : list of str
        Raw command-line arguments (``sys.argv[1:]``).
    """
    opts, args = getopt.getopt(
        argv,
        [],
        [
            "movie_path=",
            "frame_start=",
            "frame_end=",
            "output_file=",
            "model_file=",
            "batch_size=",
            "pre_post_frame=",
            "model_norm=",
            "pre_post_omission=",
        ],
    )

    for opt, arg in opts:
        if opt == "--movie_path":
            movie_path = arg
        if opt == "--frame_start":
            # int() replaces np.int, which was removed in NumPy 1.24.
            input_frames_start = int(arg)
        if opt == "--frame_end":
            input_frames_end = int(arg)
        if opt == "--output_file":
            output_file = arg
        if opt == "--model_file":
            model_path = arg
        if opt == "--batch_size":
            batch_size = int(arg)
        if opt == "--pre_post_frame":
            pre_post_frame = int(arg)
        if opt == "--pre_post_omission":
            pre_post_omission = int(arg)

    # Loading here only validates the model file early -- the inference
    # class reloads it from model_path itself. NOTE(review): likely
    # redundant; kept so a bad model file still fails fast. The unused
    # locals `frame_start`, `frame_end`, `trial` and the single-pass
    # `while NotDone:` loop from the original were removed.
    load_model(model_path)

    generator_param = {}
    inferrence_param = {}

    generator_param["type"] = "generator"
    generator_param["name"] = "EphysGenerator"
    generator_param["pre_post_frame"] = pre_post_frame
    generator_param["pre_post_omission"] = pre_post_omission
    generator_param["batch_size"] = batch_size
    generator_param["start_frame"] = input_frames_start
    generator_param["end_frame"] = input_frames_end
    generator_param["randomize"] = 0
    generator_param["train_path"] = movie_path

    inferrence_param["type"] = "inferrence"
    inferrence_param["name"] = "core_inferrence"
    inferrence_param["model_path"] = model_path
    inferrence_param["output_file"] = output_file

    path_generator = output_file + ".generator.json"
    json_obj = JsonSaver(generator_param)
    json_obj.save_json(path_generator)

    path_infer = output_file + ".inferrence.json"
    json_obj = JsonSaver(inferrence_param)
    json_obj.save_json(path_infer)

    generator_obj = ClassLoader(path_generator)
    data_generator = generator_obj.find_and_build()(path_generator)

    inferrence_obj = ClassLoader(path_infer)
    inferrence_class = inferrence_obj.find_and_build()(path_infer,
                                                       data_generator)
    inferrence_class.run()

    # to notify process is finished
    finish_file = h5py.File(output_file + ".done", "w")
    finish_file.close()