Example #1
def transform_images(image_sequence,
                     processors_num=None,
                     use_shared_memory=False):

    ticker = Ticker()

    image_sequence["enhanced"] = ('i',
                                  rescale_intensity(
                                      image_sequence["enhanced"][1].astype(
                                          np.double)))
    image_sequence["skeletons"] = ('i',
                                   (image_sequence["skeletons"][1] > 0).astype(
                                       np.uint8))

    ticker.tick("\nTransforming images...")
    transform = ParallelMap(processors_num)
    image_sequence["binaries"] = ('i',
                                  transform.map(transform_binary,
                                                image_sequence["binaries"][1]))
    image_sequence["branching"] = ('i',
                                   transform.map(
                                       transform_distance,
                                       image_sequence["branching"][1]))
    ticker.tock(" Finished.")

    if use_shared_memory:
        ticker.tick("\nMapping to shared memory...")
        image_sequence_shared = to_shared_memory(image_sequence)
        ticker.tock(" Finished.")
        # Return shared memory
        return image_sequence_shared

    return image_sequence
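
A minimal usage sketch for transform_images (not part of the original source), assuming a preprocessing folder path and an ImageSequence loaded as in Example #3; the file list and the processors_num value are illustrative assumptions.

# Hypothetical usage: load only the frames transform_images rewrites.
image_sequence = ImageSequence.load(path, ['enhanced.tif',
                                           'skeletons.tif',
                                           'binaries.tif',
                                           'branching.tif'])
image_sequence = transform_images(image_sequence,
                                  processors_num=2,
                                  use_shared_memory=False)
enhanced_frames = image_sequence["enhanced"][1]  # rescaled float frames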
Example #2
def track_individual(input):

    # Initialize global variables
    global Global_Sequence
    global Global_Parameters
    global Global_Lock
    global Output_Folder

    # Initialize ticker
    ticker = Ticker(Global_Lock)

    # Unpack data
    index, data = input
    filename, initial_polyline = data

    ticker.tick(" Started tracking {0}.".format(filename))

    # Reparametrize contour
    new_x, new_y, new_step = reparametrize(initial_polyline[:, 0],
                                           initial_polyline[:, 1],
                                           Global_Parameters.delta, 'linear')

    initial_points = np.dstack([new_x, new_y])[0]
    tracking_result = None

    try:
        # Create tracker
        tracker = Tracker(Global_Sequence, Global_Parameters)

        # Initialize log
        log = ""
        tracking_result = tracker.track(initial_points, log)

        ticker.tock("  Finished: " + filename)

        if tracking_result is not None:
            if Global_Lock is not None:
                Global_Lock.acquire()
            try:
                zip_path = os.path.join(Output_Folder,
                                        '{0}.zip'.format(filename))
                # Save the last snake position of the trajectory as a zipped CSV
                zip_csv(zip_path, tracking_result["snake_trajectory"][:, -1])
            except Exception:
                # Ignore failures while writing the archive
                pass
            finally:
                if Global_Lock is not None:
                    Global_Lock.release()
    except Exception as e:
        print(e)
        traceback.print_exc()
        ticker.tock("  Failed: " + filename)

    return tracking_result
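
track_individual is written as a pool worker: it unpacks an (index, (filename, initial_polyline)) task and reads the Global_* variables. The sketch below shows one way such a worker could be dispatched with multiprocessing; the helper names (_init_worker, run_tracking), the task layout, and the Manager lock are assumptions, not the project's actual track_all.

import multiprocessing as mp


def _init_worker(sequence, parameters, lock, output_folder):
    # Hypothetical initializer: populate the globals each worker reads.
    global Global_Sequence, Global_Parameters, Global_Lock, Output_Folder
    Global_Sequence = sequence
    Global_Parameters = parameters
    Global_Lock = lock
    Output_Folder = output_folder


def run_tracking(sequence, parameters, initialization, output_folder,
                 processes=4):
    # initialization is assumed to map filename -> initial polyline.
    tasks = list(enumerate(initialization.items()))
    lock = mp.Manager().Lock()
    pool = mp.Pool(processes=processes,
                   initializer=_init_worker,
                   initargs=(sequence, parameters, lock, output_folder))
    try:
        return pool.map(track_individual, tasks)
    finally:
        pool.close()
        pool.join()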
Example #3
def upload_sequence(path, processors_num=None, use_shared_memory=False):

    filenames = [
        'enhanced.tif',
        'skeletons.tif',
        'binaries.tif',
        'branching.tif',  # check if we really need this one
        'gvf_magnitude.tif',
        'gvf_angle.tif',
        'branching_coords.zip'
    ]

    ticker = Ticker()

    ticker.tick("\nLoading image sequence...")
    image_sequence = ImageSequence.load(path, filenames)
    ticker.tock(" Finished.")

    return transform_images(image_sequence, processors_num, use_shared_memory)
Example #4
def main():

    # Read configuration
    common_config = Config(os.path.join('..', '..', 'config', 'common.config'))

    # Read image preprocessing configuration
    tracker_config = Config(
        os.path.join('..', '..', 'config', 'tracker.config'))

    # Get output folder
    output_folder = common_config['Output']['Folder']

    # Use multiprocessing
    is_parallel = tracker_config['Parallel Computing']['Enabled_b']

    # Read preprocessed data
    image_sequence = upload_sequence(os.path.join('..', '..', 'output',
                                                  'preprocessing',
                                                  output_folder),
                                     use_shared_memory=is_parallel)

    # Locate the initial filaments: use the path from the configuration if
    # given, otherwise fall back to the latest generator run
    sequence_path = None
    input_folder = None
    try:
        init_path = tracker_config['Initialization']['Path']
        if init_path == '':
            raise ValueError("Empty initialization path")
    except Exception:
        sequence_path = os.path.join('..', '..', 'output', 'generator',
                                     output_folder)
        try:
            input_folder = get_latest_folder(sequence_path)
            init_path = os.path.join(sequence_path, input_folder,
                                     "filaments.zip")
        except Exception:
            return

    ticker = Ticker()
    ticker.tick("\nReading initial filaments...")
    initialization = upload_initialization(init_path)
    ticker.tock(" Finished.")

    sequence_output = os.path.join("../../output/tracking", output_folder)
    try:
        os.mkdir(sequence_output)
    except OSError:
        # The folder already exists
        pass

    run_output = os.path.join(sequence_output,
                              datetime.now().strftime('%Y-%m-%d_%H-%M-%S'))
    try:
        os.mkdir(run_output)
    except OSError:
        # The folder already exists
        pass

    # The color table exists only when the initialization came from the
    # generator output; copy it next to the tracking results if available
    if sequence_path is not None and input_folder is not None:
        try:
            shutil.copy2(os.path.join(sequence_path, input_folder,
                                      'colors.csv'), run_output)
        except (IOError, OSError):
            pass

    # Track all filaments
    ticker.tick("\nStarting tracking...")
    track_all(initialization, image_sequence, tracker_config, run_output)
    ticker.tock(" Tracking completed!")
Example #5
def main():

    ticker = Ticker()
    ticker.tick("Started filament generation...")

    input_dir = os.path.join("..", "..", "output", "preprocessing")
    output_dir = os.path.join("..", "..", "output", "generator")

    # Read configuration
    common_config = Config(os.path.join("..", "..", "config", "common.config"))

    # Read filaments generator configuration
    generator_config = Config(
        os.path.join("..", "..", "config", "generator.config"))

    folder_name = common_config["Output"]["Folder"]
    sequence_path = os.path.join(common_config["Image Sequence"]["Path"],
                                 common_config["Image Sequence"]["Filename"])

    # Generate filaments
    filaments_original, filaments_filtered = generator.generate(
        input_dir, folder_name, generator_config)

    directory = os.path.join(output_dir, folder_name)
    # Make folder
    try:
        os.mkdir(directory)
    except OSError:
        # The folder already exists
        pass

    # Output path
    current_date = datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
    output_path = os.path.join(
        directory, "__".join([current_date,
                              str(len(filaments_filtered))]))

    try:
        os.mkdir(output_path)
    except OSError:
        # The folder already exists
        pass

    # Get colors
    colors = get_random_colors(len(filaments_filtered))

    # Get background image
    image_sequence = imread(sequence_path)

    background = np.squeeze(image_sequence)[0].astype(np.float32)
    background /= background.max()

    # Save filaments
    save_filaments(filaments_filtered, output_path, background, colors)

    # Plot sequence
    if __PLOT_OVERLAY__:
        plot_filaments(filaments_original, background, None,
                       "Original filaments")
        plot_filaments(filaments_filtered, background, colors,
                       "Filtered filaments", True)

    ticker.tock(" Generation finished.")
Example #6
def finallize(directory, image_sequence, config):
    # Import imsave
    from skimage.external.tifffile import imsave
    from zip import zip_csv

    ticker = Ticker()

    ticker.tick("\nSaving to the folder: " + directory)

    # Make folder
    try:
        os.mkdir(directory)
        ticker.tock(" Folder was created.")
    except:
        ticker.tock(
            " The folder with this name exists, it's content might be overwritten."
        )

    # Obtain skeleton images
    skeletons = np.asarray([d["skeleton"] for d in image_sequence.image_data])
    # Obtain enhanced images
    enhanced = np.asarray(
        [d["enhanced_image"] for d in image_sequence.image_data])
    # Obtain binary images
    binaries = np.asarray(
        [d["binary_image"] for d in image_sequence.image_data])
    # Branching images
    branching_images = np.asarray(
        [d["branching_image"] for d in image_sequence.image_data])
    # Branching points coordinates
    branching_points = np.asarray(
        [d["branching_coords"] for d in image_sequence.image_data])

    ticker.tick("\nCalculating stable branching points...")
    stable_branching, stable_coordinates = detect_stable_points(
        enhanced, branching_images, branching_points)
    ticker.tock(" Finished.")

    ticker.tick("\nStart coding gradient vector flow...")
    gvf_encoded = encode_gvf(
        image_sequence,
        processors_num=config["Parallel Computing"]["processors_number_i"])
    ticker.tock(" Finished.")

    # TIFF metadata
    meta = {'axes': 'TZCYX'}

    ticker.tick("\nSaving files...")
    # Save skeleton images
    imsave(os.path.join(directory, 'skeletons.tif'),
           tyx_to_tzcyx(skeletons),
           metadata=meta)
    # Save enhanced images
    imsave(os.path.join(directory, 'enhanced.tif'),
           tyx_to_tzcyx(enhanced),
           metadata=meta)
    # Save binary images
    imsave(os.path.join(directory, 'binaries.tif'),
           tyx_to_tzcyx(binaries),
           metadata=meta)
    # Save branching images
    imsave(os.path.join(directory, 'branching.tif'),
           tyx_to_tzcyx(branching_images),
           metadata=meta)
    # Save branching points
    zip_csv(os.path.join(directory, "branching_coords.zip"),
            np.asarray(branching_points))
    # Save GVF
    imsave(os.path.join(directory, 'gvf_magnitude.tif'),
           tyx_to_tzcyx(gvf_encoded[:, 0], np.uint8, 1),
           metadata=meta)

    imsave(os.path.join(directory, 'gvf_angle.tif'),
           tyx_to_tzcyx(gvf_encoded[:, 1], np.uint8, 1),
           metadata=meta)
    # Save stable branching points
    imsave(os.path.join(directory, 'stable_branching.tif'),
           tyx_to_tzcyx(stable_branching, np.uint16, 1),
           metadata=meta)
    zip_csv(os.path.join(directory, 'stable_coords.zip'), stable_coordinates)

    ticker.tock(" Finished.")

    print "\nProcessing completed:", str(datetime.now())