# Example 1
def prep_cellfinder_general():
    """Parse cellfinder CLI arguments, prepare outputs and start logging.

    Returns:
        args: the parsed (and augmented) argument namespace.
        arg_groups: arguments grouped by parser argument group.
        what_to_run: object describing which pipeline steps to run.
        atlas: the ``BrainGlobeAtlas`` requested via ``args.atlas``.
    """
    # Reuse the same parser factory for both parsing and group recovery.
    # The original called a bare ``cellfinder_parser()`` on the second
    # line, inconsistent with ``parser.cellfinder_parser()`` above and a
    # NameError unless the name was separately imported.
    args = parser.cellfinder_parser().parse_args()
    arg_groups = get_arg_groups(args, parser.cellfinder_parser())

    check_input_arg_existance(args)

    # ``exist_ok=True`` avoids the race between the existence check and
    # the directory creation.
    os.makedirs(args.output_dir, exist_ok=True)

    args.paths = Paths(args.output_dir)

    fancylog.start_logging(
        args.output_dir,
        program_for_log,
        variables=[args, args.paths],
        verbose=args.debug,
        log_header="CELLFINDER LOG",
    )

    # Persist the run's metadata next to the outputs.
    log_metadata(args.paths.metadata_path, args)

    what_to_run = CalcWhatToRun(args)
    args.signal_ch_ids, args.background_ch_id = check_and_return_ch_ids(
        args.signal_ch_ids, args.background_ch_id, args.signal_planes_paths
    )
    args.brainreg_paths = BrainRegPaths(
        args.paths.registration_output_folder
    )
    atlas = BrainGlobeAtlas(args.atlas)
    return args, arg_groups, what_to_run, atlas
# Example 2
def setup(config_path):
    """Resolve arguments, load config/options and start logging.

    Args:
        config_path: fallback configuration path used when the parsed
            arguments do not carry one.

    Returns:
        A ``(args, options, config)`` tuple.
    """
    # A single positional argument (that is not "-h") is treated as an
    # experiment file; otherwise fall back to the full CLI parser.
    single_experiment_arg = len(sys.argv) == 2 and sys.argv[1] != "-h"
    if single_experiment_arg:
        args = experiment_parser.GetArgs(sys.argv[1])
    else:
        args = cli_parser.get_args()

    ensure_directory_exists(args.output_dir)

    # Use the supplied default when the CLI did not provide a config.
    if args.config_path is None:
        args.config_path = config_path

    config = config_parser.GetConfig(args.config_path)
    options = config_parser.GetOptions(args.config_path)
    options.num_processes = get_num_processes(
        min_free_cpu_cores=options.n_free_cpus
    )

    fancylog.start_logging(
        args.output_dir,
        program_for_log,
        variables=[args],
        verbose=args.verbose,
        log_header="OPENDIRECTION LOG",
    )

    return args, options, config
def main():
    """Transform detected cell positions into standard (atlas) space."""
    start_time = datetime.now()

    args = cells_standard_space_cli_parser().parse_args()

    # Wire all output locations onto the args namespace.
    args.paths = prep.Paths(args.output_dir)
    args.paths.standard_space_output_folder = args.output_dir
    args.paths.cells_in_standard_space = join(
        args.paths.output_dir, "cells_in_standard_space.xml"
    )
    cli_path_update(args.paths, args)
    args.paths.make_invert_cell_position_paths()
    args = define_pixel_sizes(args)

    # TODO: implement a recursive function to remove the need to do this
    # (probably using pathlib)
    for folder in (
        args.paths.output_dir,
        args.paths.standard_space_output_folder,
    ):
        ensure_directory_exists(folder)

    fancylog.start_logging(
        args.paths.output_dir,
        log_to_file=False,
        package=program_name,
        variables=args,
        verbose=args.debug,
        filename="cells_to_standard_space",
        log_header="CELL TRANSFORMATION TO STANDARD SPACE LOG",
    )

    logging.info("Starting transformation of cell positions")
    transform_cells_to_standard_space(args)
    logging.info("Finished. Total time taken: %s", datetime.now() - start_time)
def main():
    """Parse extraction CLI arguments, start logging and run the Extractor.

    Raises:
        ValueError: if ``--output-directory`` was supplied but is not an
            existing directory.
    """
    args = extraction_parser().parse_args()

    # Get output directory (default: current working directory).
    if args.output_directory is None:
        # BUG FIX: the original called ``os.get_cwd()``, which does not
        # exist — the correct function is ``os.getcwd()`` (the sibling
        # implementation of this entry point already uses it).
        outdir = os.getcwd()
    elif not os.path.isdir(args.output_directory):
        raise ValueError("Output directory invalid")
    else:
        outdir = args.output_directory

    # Start log, named after the registration folder being processed.
    log_name = "injection_finder_{}".format(
        os.path.split(args.registration_folder)[-1])
    fancylog.start_logging(outdir, package, filename=log_name, verbose=True)

    # Start extraction
    Extractor(
        args.img_filepath,
        args.registration_folder,
        logging,
        overwrite=args.overwrite,
        gaussian_kernel=args.gaussian_kernel,
        percentile_threshold=args.percentile_threshold,
        threshold_type=args.threshold_type,
        obj_path=args.obj_path,
        overwrite_registration=args.overwrite_registration,
    )
# Example 5
def main(directory):
    """Demonstrate fancylog by emitting one message per log level.

    Args:
        directory: where fancylog writes its log file.
    """
    args = MadeUpArgs()

    # Log verbosely so the debug message below is actually emitted.
    fancylog.start_logging(
        directory,
        package,
        variables=[args, args.paths],
        verbose=True,
    )

    logging.info("This is an info message")
    logging.debug("This is a debug message")
    logging.warning("This fun logging experience is about to end :(")
# Example 6
def run():
    """CLI entry point: prepare arguments, start logging, run registration."""
    start_time = datetime.now()

    args = register_cli_parser().parse_args()
    args = define_pixel_sizes(args)
    args, additional_images_downsample = prep_registration(args)
    args = make_paths_absolute(args)

    fancylog.start_logging(
        args.registration_output_folder,
        program_for_log,
        variables=[args],
        verbose=args.debug,
        log_header="AMAP LOG",
        multiprocessing_aware=False,
    )

    logging.info("Starting registration")

    # Gather the (many) registration options into one mapping so the
    # call below stays readable.
    registration_options = dict(
        x_pixel_um=args.x_pixel_um,
        y_pixel_um=args.y_pixel_um,
        z_pixel_um=args.z_pixel_um,
        orientation=args.orientation,
        flip_x=args.flip_x,
        flip_y=args.flip_y,
        flip_z=args.flip_z,
        rotation=args.rotation,
        affine_n_steps=args.affine_n_steps,
        affine_use_n_steps=args.affine_use_n_steps,
        freeform_n_steps=args.freeform_n_steps,
        freeform_use_n_steps=args.freeform_use_n_steps,
        bending_energy_weight=args.bending_energy_weight,
        grid_spacing=args.grid_spacing,
        smoothing_sigma_reference=args.smoothing_sigma_reference,
        smoothing_sigma_floating=args.smoothing_sigma_floating,
        histogram_n_bins_floating=args.histogram_n_bins_floating,
        histogram_n_bins_reference=args.histogram_n_bins_reference,
        sort_input_file=args.sort_input_file,
        n_free_cpus=args.n_free_cpus,
        save_downsampled=not args.no_save_downsampled,
        boundaries=not args.no_boundaries,
        additional_images_downsample=additional_images_downsample,
        debug=args.debug,
    )
    register(
        args.registration_config,
        args.image_paths,
        args.registration_output_folder,
        **registration_options,
    )

    logging.info("Finished. Total time taken: %s", datetime.now() - start_time)
# Example 7
def main():
    """CLI entry point for brainreg: parse args, start logging, register."""
    start_time = datetime.now()

    args = register_cli_parser().parse_args()
    arg_groups = get_arg_groups(args, register_cli_parser())
    args = define_pixel_sizes(args)
    args, additional_images_downsample = prep_registration(args)

    paths = Paths(args.brainreg_directory)
    log_metadata(paths.metadata_path, args)

    fancylog.start_logging(
        paths.registration_output_folder,
        program_for_log,
        variables=[args],
        verbose=args.debug,
        log_header="BRAINREG LOG",
        multiprocessing_aware=False,
    )

    logging.info("Starting registration")

    atlas = BrainGlobeAtlas(args.atlas)

    # Options specific to the NiftyReg backend live in their own
    # argument group.
    niftyreg_options = arg_groups["NiftyReg registration backend options"]
    register(
        atlas,
        args.orientation,
        args.image_paths,
        paths,
        niftyreg_options,
        x_pixel_um=args.x_pixel_um,
        y_pixel_um=args.y_pixel_um,
        z_pixel_um=args.z_pixel_um,
        sort_input_file=args.sort_input_file,
        n_free_cpus=args.n_free_cpus,
        additional_images_downsample=additional_images_downsample,
        backend=args.backend,
        debug=args.debug,
    )

    logging.info("Finished. Total time taken: %s", datetime.now() - start_time)
# Example 8
def prep_cellfinder_general():
    """Parse cellfinder CLI args, set up paths and logging, decide steps.

    Returns:
        A ``(args, what_to_run)`` tuple: the augmented argument
        namespace and the object describing which steps to run.
    """
    args = parser.cellfinder_parser().parse_args()
    args = define_pixel_sizes(args)
    check_input_arg_existance(args)

    # Create the output directory on first run.
    if not os.path.exists(args.output_dir):
        os.makedirs(args.output_dir)

    args.paths = Paths(args.output_dir)
    args.paths.make_reg_paths()

    fancylog.start_logging(
        args.output_dir,
        program_for_log,
        variables=[args, args.paths],
        verbose=args.debug,
        log_header="CELLFINDER LOG",
    )

    what_to_run = CalcWhatToRun(args)

    # Normalise the channel identifiers before returning.
    channel_ids = check_and_return_ch_ids(
        args.signal_ch_ids,
        args.background_ch_id,
        args.signal_planes_paths,
    )
    args.signal_ch_ids, args.background_ch_id = channel_ids
    return args, what_to_run
# Example 9
def mult_exp_setup():
    """Prepare args, options and per-experiment configs for a multi-run.

    Returns:
        A ``(args, options, experiment_config_list, num_processes)``
        tuple.
    """
    args = get_args()
    ensure_directory_exists(args.output_dir)

    options = config_parser.GetOptions(args.options)
    num_processes = get_num_processes(min_free_cpu_cores=args.n_free_cpus)
    options.num_processes = num_processes

    fancylog.start_logging(
        args.output_dir,
        program_for_log,
        variables=[args],
        verbose=args.verbose,
        log_header="OPENDIRECTION MULTI EXPERIMENT LOG",
    )

    # One .txt file per experiment in the given folder.
    experiment_files = glob(os.path.join(args.exp_files, "*.txt"))
    logging.info(f"Found {len(experiment_files)} experiment files")
    experiment_config_list = [
        experiment_parser.GetArgs(experiment_file)
        for experiment_file in experiment_files
    ]

    return args, options, experiment_config_list, num_processes
# Example 10
def main():
    """Run injection-site extraction from the command line.

    Raises:
        ValueError: if ``--output-directory`` was supplied but is not an
            existing directory.
    """
    args = extraction_parser().parse_args()

    # Resolve the output directory, defaulting to the cwd.
    outdir = args.output_directory
    if outdir is None:
        outdir = os.getcwd()
    elif not os.path.isdir(outdir):
        raise ValueError("Output directory invalid")

    # Default the .obj output path to sit next to the input image.
    if args.obj_path is None:
        args.obj_path = Path(args.img_filepath).with_suffix(".obj")
    else:
        args.obj_path = Path(args.obj_path)

    # Start log
    fancylog.start_logging(
        outdir,
        package_for_log,
        filename="injection_finder",
        verbose=args.debug,
        log_to_file=args.save_log,
    )

    # Start extraction
    Extractor(
        args.img_filepath,
        args.registration_folder,
        overwrite=args.overwrite,
        gaussian_kernel=args.gaussian_kernel,
        gaussian_kernel_z=args.gaussian_kernel_z,
        percentile_threshold=args.percentile_threshold,
        threshold_type=args.threshold_type,
        obj_path=args.obj_path,
        overwrite_registration=args.overwrite_registration,
    )
# Example 11
def main():
    """Train a cellfinder classification network from YAML-listed data.

    Parses training arguments, builds training/validation cube
    generators, wires up Keras callbacks (TensorBoard, checkpoints,
    CSV logging) and fits the model, saving weights or the full model
    at the end.
    """
    # Imports are deferred until after TF log suppression so that
    # importing tensorflow does not spam the console first.
    from cellfinder.main import suppress_tf_logging

    suppress_tf_logging(tf_suppress_log_messages)

    from tensorflow.keras.callbacks import (
        TensorBoard,
        ModelCheckpoint,
        CSVLogger,
    )

    from cellfinder.tools.prep import prep_training
    from cellfinder.classify.tools import make_lists, get_model
    from cellfinder.classify.cube_generator import CubeGeneratorFromDisk

    start_time = datetime.now()
    args = training_parse()
    output_dir = Path(args.output_dir)
    ensure_directory_exists(output_dir)
    args = prep_training(args)

    fancylog.start_logging(
        args.output_dir,
        program_for_log,
        variables=[args],
        log_header="CELLFINDER TRAINING LOG",
    )

    # Every yaml file lists datasets; each dataset contributes a list
    # of tiff images.
    yaml_contents = parse_yaml(args.yaml_file)

    tiff_files = get_tiff_files(yaml_contents)
    logging.info(f"Found {sum(len(imlist) for imlist in tiff_files)} images "
                 f"from {len(yaml_contents)} datasets "
                 f"in {len(args.yaml_file)} yaml files")

    # Either load an existing model / weights or build a fresh network
    # of the requested depth.
    model = get_model(
        existing_model=args.trained_model,
        model_weights=args.model_weights,
        network_depth=models[args.network_depth],
        learning_rate=args.learning_rate,
        continue_training=args.continue_training,
    )

    signal_train, background_train, labels_train = make_lists(tiff_files)

    # Optionally hold out a fraction of the data for validation.
    if args.test_fraction > 0:
        logging.info("Splitting data into training and validation datasets")
        (
            signal_train,
            signal_test,
            background_train,
            background_test,
            labels_train,
            labels_test,
        ) = train_test_split(
            signal_train,
            background_train,
            labels_train,
            test_size=args.test_fraction,
        )

        logging.info(f"Using {len(signal_train)} images for training and "
                     f"{len(signal_test)} images for validation")
        validation_generator = CubeGeneratorFromDisk(
            signal_test,
            background_test,
            labels=labels_test,
            batch_size=args.batch_size,
            train=True,
        )

        # for saving checkpoints (includes val_loss only when a
        # validation set exists — Keras cannot format it otherwise)
        base_checkpoint_file_name = "-epoch.{epoch:02d}-loss-{val_loss:.3f}.h5"

    else:
        logging.info("No validation data selected.")
        validation_generator = None
        base_checkpoint_file_name = "-epoch.{epoch:02d}.h5"

    # Training data is shuffled and (unless disabled) augmented.
    training_generator = CubeGeneratorFromDisk(
        signal_train,
        background_train,
        labels=labels_train,
        batch_size=args.batch_size,
        shuffle=True,
        train=True,
        augment=not args.no_augment,
    )
    callbacks = []

    if args.tensorboard:
        logdir = output_dir / "tensorboard"
        ensure_directory_exists(logdir)
        tensorboard = TensorBoard(
            log_dir=logdir,
            histogram_freq=0,
            write_graph=True,
            update_freq="epoch",
        )
        callbacks.append(tensorboard)

    # Per-epoch checkpoints: file name prefix reflects whether whole
    # models or only weights are saved.
    if not args.no_save_checkpoints:
        if args.save_weights:
            filepath = str(output_dir / ("weight" + base_checkpoint_file_name))
        else:
            filepath = str(output_dir / ("model" + base_checkpoint_file_name))

        checkpoints = ModelCheckpoint(
            filepath,
            save_weights_only=args.save_weights,
        )
        callbacks.append(checkpoints)

    # Optionally record per-epoch metrics to a CSV file.
    if args.save_progress:
        filepath = str(output_dir / "training.csv")
        csv_logger = CSVLogger(filepath)
        callbacks.append(csv_logger)

    logging.info("Beginning training.")
    model.fit(
        training_generator,
        validation_data=validation_generator,
        use_multiprocessing=False,
        epochs=args.epochs,
        callbacks=callbacks,
    )

    # Final save: weights only, or the full model.
    if args.save_weights:
        logging.info("Saving model weights")
        model.save_weights(str(output_dir / "model_weights.h5"))
    else:
        logging.info("Saving model")
        model.save(output_dir / "model.h5")

    logging.info(
        "Finished training, "
        "Total time taken: %s",
        datetime.now() - start_time,
    )
# Example 12
def sort_mantis_files():
    """Sort raw Mantis .tdms recordings into video/metadata/AI folders.

    For each experiment subdirectory of ``raw_tosort_fld``, identifies
    the metadata, analog-inputs and video tdms files, checks whether
    Mantis dropped frames, then renames (moves) the three files into
    their destination folders.

    Raises:
        NotImplementedError: more than 3 tdms files, or duplicated
            "(N)"-suffixed files, in a subdirectory.
        ValueError: fewer than 3 tdms files found.
        FileNotFoundError: metadata or analog-inputs file missing.
        FileExistsError: a destination file already exists.
    """
    exp_dirs = get_subdirs(raw_tosort_fld)

    # Nothing to sort.
    if not exp_dirs:
        return

    # Start logging (re-enable first: logging is disabled again at the
    # end of this function, so a previous run may have left it off)
    logging.disable(logging.NOTSET)
    fancylog.start_logging(raw_tosort_fld,
                           package,
                           verbose=True,
                           filename='mantis_sorter')
    logging.info("Starting to process mantis files")

    # Loop over subdirs
    for subdir in exp_dirs:
        # --------------------------------- GET FILES -------------------------------- #
        logging.info("  processing: {}".format(subdir))
        files = [f for f in listdir(subdir) if f.endswith('.tdms')]
        if not files: continue
        # Exactly three tdms files are expected: metadata, analog
        # inputs and video.
        if len(files) > 3:
            raise NotImplementedError("Can't deal with this many files!")
        elif len(files) < 3:
            raise ValueError("Found too few files")

        # Refuse duplicated recordings ("name(1).tdms" etc.), which
        # would make the name-based matching below ambiguous.
        for f in files:
            for i in range(10):
                if "({})".format(i + 1) in f:
                    raise NotImplementedError(
                        "Cannot deal with how files are organised in the folder, sorry. "
                    )

        # Infer what the experiment name is
        metadata_file = [f for f in files if 'meta.tdms' in f]
        if not metadata_file:
            logging.warning("Failed to find metadata file")
            raise FileNotFoundError("Could not find metadata file")
        else:
            metadata_file = metadata_file[0]

        # Get AI file
        # The experiment name is everything before "(0)" in the
        # metadata file name.
        exp_name = os.path.split(metadata_file)[-1].split("(0)")[0]
        inputs_file = [f for f in files if f.endswith(exp_name + '(0).tdms')]

        if not inputs_file:
            logging.warning("Failed to find analog inputs file")
            raise FileNotFoundError("Could not find analog inputs file")
        else:
            inputs_file = inputs_file[0]

        # Get video file (the remaining one of the three)
        video_file = [
            f for f in files if f != inputs_file and f != metadata_file
        ][0]

        # ---------------------- CHECK IF MANTIS DROPPED FRAMES ---------------------- #
        # Camera name is embedded in the video file name after "(0)-".
        camera_name = os.path.split(video_file)[-1].split("(0)-")[-1].split(
            ".")[0]
        check = check_mantis_dropped_frames(subdir,
                                            camera_name,
                                            exp_name,
                                            skip_analog_inputs=True)
        if check:
            logging.info("      Mantis didn't drop any frames for video file")
        else:
            logging.info("      Mantis dropped some frames, darn it.")

        # -------------------------------- MOVE FILES -------------------------------- #
        # Get destination files
        subshort = os.path.split(subdir)[-1]
        vdest = os.path.join(raw_video_fld, subshort + '_video.tdms')
        mdest = os.path.join(raw_metadata_fld,
                             subshort + '_video_metadata.tdms')
        adest = os.path.join(raw_analog_inputs_fld, subshort + '_AI.tdms')

        logging.info("      Video file: {} -> {}".format(video_file, vdest))
        logging.info("      Metadata file: {} -> {}".format(
            metadata_file, mdest))
        logging.info("      Analog inputs file: {} -> {}".format(
            inputs_file, adest))

        # Move files, refusing to overwrite anything already present
        for src, dest in tqdm(
                zip([video_file, metadata_file, inputs_file],
                    [vdest, mdest, adest])):
            if os.path.isfile(dest):
                logging.warning(
                    "      The destination file {} already exists, stopping to avoid overwriting"
                    .format(dest))
                raise FileExistsError(
                    "      The destination file {} already exists, stopping to avoid overwriting"
                    .format(dest))
            os.rename(src, dest)

    # disable logging
    logging.disable(sys.maxsize)
# Example 13
import os
from injection_finder.extraction.extraction import Extractor

import logging
from fancylog import fancylog
import fancylog as package

# One-off script: run injection-site extraction for a single mouse /
# channel, logging into the registration folder.

# Subject and imaging channel to process.
mouse='AY_254_2'    
channel='1'
# Registration folder produced by cellfinder for this mouse
# (Windows UNC path on the lab share).
rgf = 'Z:\\swc\\branco\\BrainSaw\\{}\\cellfinder\\registration'.format(mouse)
# Downsampled image for the chosen channel.
data = os.path.join(rgf, 'downsampled_channel_{}.nii'.format(channel))
# Where the extracted injection-site mesh (.obj) is written.
out_fld = 'Z:\\swc\\branco\\BrainSaw\\injections'
out_path = os.path.join(out_fld, mouse+'_ch{}inj.obj'.format(channel))


fancylog.start_logging(
        rgf, package, verbose=True)


# Extract the injection site: threshold at the 99.95th percentile and
# save the resulting mesh to ``out_path``.
Extractor(
        data, 
        rgf, 
        logging, 
        overwrite=True,
        percentile_threshold=99.95,
        threshold_type='perc',
        obj_path=out_path,
)