Exemplo n.º 1
0
def test_generator_ephys_creation(tmp_path):
    """Build an EphysGenerator from a saved JSON config and check its length."""

    # Generator over the full tiny ephys sample (end_frame=-1 means "to the end").
    generator_param = {
        "type": "generator",
        "name": "EphysGenerator",
        "pre_post_frame": 30,
        "pre_post_omission": 1,
        "steps_per_epoch": -1,
        "train_path": os.path.join(
            pathlib.Path(__file__).parent.absolute(),
            "..",
            "sample_data",
            "ephys_tiny_continuous.dat2",
        ),
        "batch_size": 10,
        "start_frame": 0,
        "end_frame": -1,
        "randomize": 0,
    }

    path_generator = os.path.join(tmp_path, "generator.json")
    JsonSaver(generator_param).save_json(path_generator)

    # Resolve the generator class from the json description and instantiate it.
    data_generator = ClassLoader(path_generator).find_and_build()(path_generator)

    # Expected number of batches for this sample file at batch_size 10.
    assert len(data_generator) == 4996
Exemplo n.º 2
0
def test_generator_tif_creation(tmp_path):
    """Build a SingleTifGenerator from a saved JSON config and check its length."""

    # We are reusing the data generator for training here.
    generator_param = {
        "type": "generator",
        "name": "SingleTifGenerator",
        "pre_post_frame": 30,
        "pre_post_omission": 0,
        "steps_per_epoch": -1,
        "train_path": os.path.join(
            pathlib.Path(__file__).parent.absolute(),
            "..",
            "sample_data",
            "ophys_tiny_761605196.tif",
        ),
        "batch_size": 5,
        "start_frame": 0,
        "end_frame": 99,
        "randomize": 0,
    }

    path_generator = os.path.join(tmp_path, "generator.json")
    JsonSaver(generator_param).save_json(path_generator)

    # Resolve the generator class from the json description and instantiate it.
    data_generator = ClassLoader(path_generator).find_and_build()(path_generator)

    # Expected number of batches for frames 0-99 of the tiny sample movie.
    assert len(data_generator) == 8
Exemplo n.º 3
0
def _get_ephys_model(jobdir, generator_params, inference_params):
    """Serialize both parameter dicts to json in *jobdir*, then build and
    return the inference model wired to its data generator."""
    path_generator = os.path.join(jobdir, "generator.json")
    JsonSaver(generator_params).save_json(path_generator)

    path_infer = os.path.join(jobdir, "inferrence.json")
    JsonSaver(inference_params).save_json(path_infer)

    # Instantiate the generator, then the model that consumes it.
    data_generator = ClassLoader(path_generator).find_and_build()(path_generator)
    return ClassLoader(path_infer).find_and_build()(path_infer, data_generator)
Exemplo n.º 4
0
# Model identifier built from the network topology name and the loss.
training_param[
    "model_string"] = network_param["name"] + "_" + training_param["loss"]

jobdir = (
    "/Users/jeromel/Documents/Work documents/Allen Institute/Projects/Deep2P/examples/tiny_training/"
    + training_param["model_string"] + "_" + run_uid)

training_param["output_dir"] = jobdir

try:
    os.mkdir(jobdir)
except FileExistsError:
    # The bare `except:` used before swallowed every error (permissions,
    # missing parent directory); only an already-existing folder is expected.
    print("folder already exists")

# Persist each parameter dict so the collection loaders can rebuild them.
path_training = os.path.join(jobdir, "training.json")
json_obj = JsonSaver(training_param)
json_obj.save_json(path_training)

path_generator = os.path.join(jobdir, "generator.json")
json_obj = JsonSaver(generator_param)
json_obj.save_json(path_generator)

path_test_generator = os.path.join(jobdir, "test_generator.json")
json_obj = JsonSaver(generator_test_param)
json_obj.save_json(path_test_generator)

path_network = os.path.join(jobdir, "network.json")
json_obj = JsonSaver(network_param)
json_obj.save_json(path_network)

# Resolve the generator class described by the json file.
generator_obj = ClassLoader(path_generator)
Exemplo n.º 5
0
    # fMRI denoising inference using a pre-trained dense denoiser model.
    inferrence_param["name"] = "fmri_inferrence"
    # Absolute path to the trained Keras model weights (.h5).
    inferrence_param[
        "model_path"] = "/Users/jeromel/Documents/Work documents/Allen Institute/Projects/Deep2P/fMRI/trained_fmri_models/fmri_volume_dense_denoiser_mean_absolute_error_2020_08_25_23_54_2020_08_25_23_54/2020_08_25_23_54_fmri_volume_dense_denoiser_mean_absolute_error_2020_08_25_23_54_model.h5"

    # Denoised output file takes the input filename with a "denoised_" prefix.
    inferrence_param["output_file"] = os.path.join(
        path_output, "denoised_" + indiv_inferrence_file)

    jobdir = "/Users/jeromel/Documents/Work documents/Allen Institute/Projects/Deep2P/fMRI/studyimagenet/denoised"

    try:
        os.mkdir(jobdir)
    except:
        # NOTE(review): bare `except` hides real errors (permissions, bad
        # path); should be `except FileExistsError`.
        print("folder already exists")

    # Serialize both parameter dicts so the collection loaders can rebuild them.
    path_generator = os.path.join(jobdir, "generator.json")
    json_obj = JsonSaver(generator_param)
    json_obj.save_json(path_generator)

    path_infer = os.path.join(jobdir, "inferrence.json")
    json_obj = JsonSaver(inferrence_param)
    json_obj.save_json(path_infer)

    # Build the data generator, then the inference class that consumes it.
    generator_obj = ClassLoader(path_generator)
    data_generator = generator_obj.find_and_build()(path_generator)

    inferrence_obj = ClassLoader(path_infer)
    inferrence_class = inferrence_obj.find_and_build()(path_infer,
                                                       data_generator)

    # Run denoising; output is written to inferrence_param["output_file"].
    inferrence_class.run()
def main(argv):
    """Run denoising inference on an ophys movie described by CLI options.

    Expected options: --movie_path, --frame_start, --frame_end,
    --output_file, --model_file, --batch_size, --pre_frame, --post_frame,
    --model_norm, --save_raw. Writes the denoised result via core_inferrence
    and creates an empty "<output_file>.done" file when finished.
    """
    opts, args = getopt.getopt(
        argv,
        [],
        [
            "movie_path=",
            "frame_start=",
            "frame_end=",
            "output_file=",
            "model_file=",
            "batch_size=",
            "pre_frame=",
            "post_frame=",
            "model_norm=",
            "save_raw=",
        ],
    )

    # default
    save_raw = False

    for opt, arg in opts:
        if opt == "--movie_path":
            movie_path = arg
        if opt == "--frame_start":
            # np.int was deprecated in NumPy 1.20 and removed in 1.24;
            # the builtin int is the correct replacement.
            input_frames_start = int(arg)
        if opt == "--frame_end":
            input_frames_end = int(arg)
        if opt == "--output_file":
            output_file = arg
        if opt == "--model_file":
            model_path = arg
        if opt == "--batch_size":
            batch_size = int(arg)
        if opt == "--pre_frame":
            pre_frame = int(arg)
        if opt == "--post_frame":
            post_frame = int(arg)
        if opt == "--save_raw":
            # bool(arg) was wrong: any non-empty string — including the
            # literal "False" — is truthy. Parse the flag text explicitly.
            save_raw = arg.strip().lower() in ("1", "true", "yes")

    generator_param = {}
    inferrence_param = {}

    generator_param["type"] = "generator"
    generator_param["name"] = "OphysGenerator"
    generator_param["pre_frame"] = pre_frame
    generator_param["post_frame"] = post_frame

    # This is meant to allow compatibility with a generator also used in training
    generator_param["steps_per_epoch"] = 100

    generator_param["batch_size"] = batch_size
    generator_param["start_frame"] = input_frames_start
    generator_param["end_frame"] = input_frames_end
    generator_param["movie_path"] = movie_path
    generator_param["randomize"] = 0

    inferrence_param["type"] = "inferrence"
    inferrence_param["name"] = "core_inferrence"
    inferrence_param["model_path"] = model_path
    inferrence_param["output_file"] = output_file
    inferrence_param["save_raw"] = save_raw

    # The original `while NotDone:` loop executed exactly once; straight-line.
    path_generator = output_file + ".generator.json"
    JsonSaver(generator_param).save_json(path_generator)

    path_infer = output_file + ".inferrence.json"
    JsonSaver(inferrence_param).save_json(path_infer)

    generator_obj = ClassLoader(path_generator)
    data_generator = generator_obj.find_and_build()(path_generator)

    inferrence_obj = ClassLoader(path_infer)
    inferrence_class = inferrence_obj.find_and_build()(path_infer,
                                                       data_generator)

    inferrence_class.run()

    # to notify process is finished
    finish_file = h5py.File(output_file + ".done", "w")
    finish_file.close()
Exemplo n.º 7
0
def test_ephys_training(tmp_path):
    """End-to-end smoke test: train a tiny ephys denoiser for a couple of
    steps and check that the final validation loss is plausible."""

    steps_per_epoch = 2

    # Both generators read the same tiny sample recording.
    sample_path = os.path.join(
        pathlib.Path(__file__).parent.absolute(),
        "..",
        "sample_data",
        "ephys_tiny_continuous.dat2",
    )

    # Validation generator over frames 0-30.
    generator_test_param = {
        "type": "generator",  # type of collection
        "name": "EphysGenerator",
        # Number of frame provided before and after the predicted frame.
        "pre_post_frame": 30,
        "train_path": sample_path,
        "batch_size": 10,
        "start_frame": 0,
        "end_frame": 30,
        # Number of frame omitted before and after the predicted frame.
        "pre_post_omission": 1,
        "steps_per_epoch": -1,
    }

    # Training generator over frames 2000-2030 of the same recording.
    generator_param = {
        "type": "generator",
        "steps_per_epoch": steps_per_epoch,
        "name": "EphysGenerator",
        "pre_post_frame": 30,
        "train_path": sample_path,
        "batch_size": 10,
        "start_frame": 2000,
        "end_frame": 2030,
        "pre_post_omission": 1,
    }

    # Network topology to pull from the collection.
    network_param = {
        "type": "network",
        "name": "unet_single_ephys_1024",
    }

    # Parameters driving the training process.
    training_param = {
        "type": "trainer",
        "name": "core_trainer",
        "run_uid": "tmp",
        "batch_size": generator_test_param["batch_size"],
        "steps_per_epoch": steps_per_epoch,
        "period_save": 25,
        "nb_gpus": 0,
        "apply_learning_decay": 0,
        "nb_times_through_data": 1,
        "learning_rate": 0.0001,
        "pre_post_frame": generator_test_param["pre_post_frame"],
        "loss": "mean_absolute_error",
        "nb_workers": 1,
    }
    training_param["model_string"] = (
        network_param["name"] + "-" + training_param["loss"]
    )

    jobdir = tmp_path
    training_param["output_dir"] = os.fspath(jobdir)

    # Serialize every parameter dict so the collection loaders can rebuild them.
    path_training = os.path.join(jobdir, "training.json")
    print(path_training)
    JsonSaver(training_param).save_json(path_training)

    path_generator = os.path.join(jobdir, "generator.json")
    JsonSaver(generator_param).save_json(path_generator)

    path_test_generator = os.path.join(jobdir, "test-generator.json")
    JsonSaver(generator_test_param).save_json(path_test_generator)

    path_network = os.path.join(jobdir, "network.json")
    JsonSaver(network_param).save_json(path_network)

    # Resolve each component class from its json description and build it.
    train_generator = ClassLoader(path_generator).find_and_build()(path_generator)
    test_generator = ClassLoader(path_test_generator).find_and_build()(
        path_test_generator
    )
    network_callback = ClassLoader(path_network).find_and_build()(path_network)

    training_class = ClassLoader(path_training).find_and_build()(
        train_generator, test_generator, network_callback, path_training
    )

    training_class.run()

    # Finalize and save output of the training.
    training_class.finalize()

    # Validation is a bit random due to initilization. We check that you get
    # reasonable number
    assert training_class.model_train.history["val_loss"][-1] < 1
Exemplo n.º 8
0
def main(argv):
    """Denoise movies described by a JSON manifest and write an HDF5 output.

    Expected options: --json_path, --output_file, --model_file,
    --batch_size, --pre_frame, --post_frame. Touches "<output_file>.done"
    when finished.
    """
    opts, args = getopt.getopt(
        argv,
        [],
        [
            "json_path=",
            "output_file=",
            "model_file=",
            "batch_size=",
            "pre_frame=",
            "post_frame=",
        ],
    )

    for opt, arg in opts:
        if opt == "--json_path":
            json_path = arg
        if opt == "--output_file":
            output_file = arg
        if opt == "--model_file":
            model_path = arg
        if opt == "--batch_size":
            batch_size = int(arg)
        if opt == "--pre_frame":
            pre_frame = int(arg)
        if opt == "--post_frame":
            post_frame = int(arg)

    generator_param = {
        "type": "generator",
        "name": "MovieJSONGenerator",
        "pre_frame": pre_frame,
        "post_frame": post_frame,
        "batch_size": batch_size,
        "train_path": json_path,
        # This parameter is not used in this context but is needed.
        "steps_per_epoch": 10,
    }

    inferrence_param = {
        "type": "inferrence",
        "name": "core_inferrence",
        "model_path": model_path,
        "output_file": output_file,
        "save_raw": True,
        "rescale": False,
    }

    # Serialize both parameter dicts next to the output file.
    path_generator = output_file + ".generator.json"
    JsonSaver(generator_param).save_json(path_generator)

    path_infer = output_file + ".inferrence.json"
    JsonSaver(inferrence_param).save_json(path_infer)

    # Build the generator, then the inference class that consumes it, and run.
    data_generator = ClassLoader(path_generator).find_and_build()(path_generator)
    inferrence_class = ClassLoader(path_infer).find_and_build()(
        path_infer, data_generator
    )

    inferrence_class.run()

    # to notify process is finished
    finish_file = h5py.File(output_file + ".done", "w")
    finish_file.close()
# Identifier for this transfer-learning run: loss name plus run timestamp.
training_param["model_string"] = ("transfer" + "_" + training_param["loss"] +
                                  "_" + training_param["run_uid"])

jobdir = ("/projectnb/jchenlab/trained_models/" +
          training_param["model_string"] + "_" + run_uid)

training_param["output_dir"] = jobdir

try:
    os.mkdir(jobdir, 0o775)
except FileExistsError:
    # The bare `except:` used before swallowed every error (permissions,
    # missing parent directory); only an already-existing folder is expected.
    print("folder already exists")

path_training = os.path.join(jobdir, "training.json")
json_obj = JsonSaver(training_param)
json_obj.save_json(path_training)

# Build one training generator per parameter set so several movies can be
# combined during transfer training.
list_train_generator = []
for local_index, indiv_generator in enumerate(generator_param_list):

    path_generator = os.path.join(jobdir,
                                  "generator" + str(local_index) + ".json")
    json_obj = JsonSaver(indiv_generator)
    json_obj.save_json(path_generator)
    generator_obj = ClassLoader(path_generator)
    train_generator = generator_obj.find_and_build()(path_generator)

    list_train_generator.append(train_generator)

path_test_generator = os.path.join(jobdir, "test_generator.json")
def main(argv):
    """Transfer-train an ephys denoiser starting from a pre-trained model.

    Reads the training/validation frame windows and hyper-parameters from
    CLI options, serializes the configuration to json under --output_path,
    then builds and runs the transfer_trainer.
    """
    opts, args = getopt.getopt(
        argv,
        [],
        [
            "movie_path=",
            "train_frame_start=",
            "train_frame_end=",
            "train_total_samples=",
            "val_frame_start=",
            "val_frame_end=",
            "val_total_samples=",
            "output_path=",
            "model_file=",
            "batch_size=",
            "pre_post_frame=",
            "pre_post_omission=",
            "loss=",
        ],
    )

    # default
    train_frame_start = 20000
    train_frame_end = -1
    train_total_samples = 10000000
    val_frame_start = 0
    val_frame_end = 19999
    val_total_samples = -1
    batch_size = 100
    pre_post_frame = 30
    pre_post_omission = 1
    loss = 'mean_squared_error'

    for opt, arg in opts:
        if opt == "--movie_path":
            movie_path = arg
        if opt == "--train_frame_start":
            # np.int was deprecated in NumPy 1.20 and removed in 1.24;
            # the builtin int is the correct replacement.
            train_frame_start = int(arg)
        if opt == "--train_frame_end":
            train_frame_end = int(arg)
        if opt == "--train_total_samples":
            train_total_samples = int(arg)
        if opt == "--val_frame_start":
            val_frame_start = int(arg)
        if opt == "--val_frame_end":
            val_frame_end = int(arg)
        if opt == "--val_total_samples":
            val_total_samples = int(arg)
        if opt == "--batch_size":
            # The original had this branch twice; one copy is enough.
            batch_size = int(arg)
        if opt == "--output_path":
            output_path = arg
        if opt == "--model_file":
            model_file = arg
        if opt == "--pre_post_frame":
            pre_post_frame = int(arg)
        if opt == "--pre_post_omission":
            pre_post_omission = int(arg)
        if opt == "--loss":
            loss = arg

    # Timestamp identifying this run in output names.
    now = datetime.datetime.now()
    run_uid = now.strftime("%Y_%m_%d_%H_%M")

    training_param = {}

    generator_test_param = {}

    generator_param = {}
    # Training generator: randomized samples from the training frame window.
    generator_param["type"] = "generator"
    generator_param["name"] = "EphysGenerator"
    generator_param["pre_post_frame"] = pre_post_frame
    generator_param["train_path"] = movie_path
    generator_param["batch_size"] = batch_size  # 100
    generator_param["start_frame"] = train_frame_start  # 20000
    generator_param["end_frame"] = train_frame_end  # -1
    generator_param["pre_post_omission"] = pre_post_omission  # 1
    generator_param["randomize"] = 1
    generator_param["steps_per_epoch"] = 100
    generator_param["total_samples"] = train_total_samples  # 10000000

    # Validation generator: the frames preceding the training window.
    generator_test_param["type"] = "generator"
    generator_test_param["name"] = "EphysGenerator"
    generator_test_param["pre_post_frame"] = pre_post_frame
    generator_test_param["train_path"] = movie_path
    generator_test_param["batch_size"] = batch_size
    generator_test_param["start_frame"] = val_frame_start  # 0
    generator_test_param["end_frame"] = val_frame_end  # 19999
    generator_test_param["pre_post_omission"] = pre_post_omission
    generator_test_param["randomize"] = 1
    generator_test_param["steps_per_epoch"] = -1
    generator_test_param["total_samples"] = val_total_samples  # -1

    # Trainer configuration; transfer_trainer starts from an existing model.
    training_param["type"] = "trainer"
    training_param["name"] = "transfer_trainer"
    training_param["model_path"] = model_file

    training_param["run_uid"] = run_uid
    training_param["batch_size"] = generator_test_param["batch_size"]
    training_param["steps_per_epoch"] = generator_param["steps_per_epoch"]
    training_param["period_save"] = 25
    training_param["nb_gpus"] = 1
    training_param["nb_times_through_data"] = 1
    training_param["learning_rate"] = 0.0005
    training_param["apply_learning_decay"] = 1
    training_param["initial_learning_rate"] = 0.0005
    training_param["epochs_drop"] = 300
    training_param["loss"] = loss
    # NOTE(review): the "+ '_'" after 'transfer_train_' produces a double
    # underscore in the model string; kept for backward-compatible naming.
    training_param["model_string"] = 'transfer_train_' + \
        "_" + training_param["loss"]

    training_param["output_dir"] = output_path

    try:
        os.mkdir(output_path)
    except FileExistsError:
        # Only an already-existing folder is expected and safe to ignore.
        print("folder already exists")

    path_training = os.path.join(output_path, "training.json")
    json_obj = JsonSaver(training_param)
    json_obj.save_json(path_training)

    # Build the training generator from its json description.
    path_generator = os.path.join(output_path, "generator.json")
    json_obj = JsonSaver(generator_param)
    json_obj.save_json(path_generator)
    generator_obj = ClassLoader(path_generator)
    train_generator = generator_obj.find_and_build()(path_generator)

    path_test_generator = os.path.join(output_path, "test_generator.json")
    json_obj = JsonSaver(generator_test_param)
    json_obj.save_json(path_test_generator)

    generator_test_obj = ClassLoader(path_test_generator)

    trainer_obj = ClassLoader(path_training)

    test_generator = generator_test_obj.find_and_build()(path_test_generator)

    training_class = trainer_obj.find_and_build()(train_generator,
                                                  test_generator,
                                                  path_training)

    training_class.run()

    # Persist the trained model and training history.
    training_class.finalize()
Exemplo n.º 11
0
def main(argv):
    """Run ephys denoising inference over a frame range of a movie.

    Expected options: --movie_path, --frame_start, --frame_end,
    --output_file, --model_file, --batch_size, --pre_post_frame,
    --model_norm, --pre_post_omission. Writes the result via core_inferrence
    and creates an empty "<output_file>.done" file when finished.
    """
    opts, args = getopt.getopt(
        argv,
        [],
        [
            "movie_path=",
            "frame_start=",
            "frame_end=",
            "output_file=",
            "model_file=",
            "batch_size=",
            "pre_post_frame=",
            "model_norm=",
            "pre_post_omission=",
        ],
    )

    for opt, arg in opts:
        if opt == "--movie_path":
            movie_path = arg
        if opt == "--frame_start":
            # np.int was deprecated in NumPy 1.20 and removed in 1.24;
            # the builtin int is the correct replacement.
            input_frames_start = int(arg)
        if opt == "--frame_end":
            input_frames_end = int(arg)
        if opt == "--output_file":
            output_file = arg
        if opt == "--model_file":
            model_path = arg
        if opt == "--batch_size":
            batch_size = int(arg)
        if opt == "--pre_post_frame":
            pre_post_frame = int(arg)
        if opt == "--pre_post_omission":
            pre_post_omission = int(arg)

    # NOTE(review): this loaded model is never used below — core_inferrence
    # loads its own copy from model_path. Kept as an early sanity check that
    # the model file is readable; consider removing to avoid a double load.
    model = load_model(model_path)

    generator_param = {}
    inferrence_param = {}

    generator_param["type"] = "generator"
    generator_param["name"] = "EphysGenerator"
    generator_param["pre_post_frame"] = pre_post_frame
    generator_param["pre_post_omission"] = pre_post_omission
    generator_param["batch_size"] = batch_size
    generator_param["start_frame"] = input_frames_start
    generator_param["end_frame"] = input_frames_end
    generator_param["randomize"] = 0

    generator_param["train_path"] = movie_path

    inferrence_param["type"] = "inferrence"
    inferrence_param["name"] = "core_inferrence"
    inferrence_param["model_path"] = model_path
    inferrence_param["output_file"] = output_file

    # The original `while NotDone:` loop executed exactly once; straight-line.
    path_generator = output_file + ".generator.json"
    JsonSaver(generator_param).save_json(path_generator)

    path_infer = output_file + ".inferrence.json"
    JsonSaver(inferrence_param).save_json(path_infer)

    generator_obj = ClassLoader(path_generator)
    data_generator = generator_obj.find_and_build()(path_generator)

    inferrence_obj = ClassLoader(path_infer)
    inferrence_class = inferrence_obj.find_and_build()(path_infer,
                                                       data_generator)

    inferrence_class.run()

    # to notify process is finished
    finish_file = h5py.File(output_file + ".done", "w")
    finish_file.close()