Example no. 1
0
def main():
    """Run main SQLite access commands after loading CLI."""
    # let the CLI parse arguments and initialize the database handles
    from magmap.io import cli
    cli.main(True)
    # use the verified database when one has been loaded, otherwise the
    # primary database; expose its connection and cursor locally
    verified = config.verified_db
    db = config.db if verified is None else verified
    conn = db.conn
    cur = db.cur
Example no. 2
0
def launch_magmap():
    """Start MagellanMapper.

    The CLI runs first to handle any user arguments; if a CLI-only task
    was selected, the CLI shuts the process down there. When execution
    continues past the CLI, the graphical interface is brought up.

    """
    # process user arguments; may terminate the process for CLI tasks
    from magmap.io import cli
    cli.main()
    # still alive, so no CLI task was chosen; launch the GUI
    from magmap.gui import visualizer
    visualizer.main()
Example no. 3
0
    # NOTE(review): this fragment begins mid-function with `elif`; the
    # enclosing definition and earlier branches are outside the visible chunk.
    elif df_task in _ARITHMETIC_TASKS:
        # perform arithmetic operations on pairs of columns in a data frame
        df = pd.read_csv(config.filename)
        # look up the operation callable registered for this task
        fn = _ARITHMETIC_TASKS[df_task]
        # walk the x/y/id column specs in lockstep; zip stops at the
        # shortest sequence, so extra entries in any of them are ignored
        for col_x, col_y, col_id in zip(libmag.to_seq(x_col),
                                        libmag.to_seq(y_col),
                                        libmag.to_seq(id_col)):
            # perform the arithmetic operation specified by the specific
            # task on the pair of columns, inserting the results in a new
            # column specified by ID
            func_to_paired_cols(df, col_x, col_y, fn, col_id)

        # output modified data frame to CSV file
        data_frames_to_csv(df, libmag.make_out_path())

    elif df_task is config.DFTasks.REPLACE_VALS:
        # replace values in a CSV file
        # X_COL: replace from these values
        # Y_COL: replace to these values
        # GROUP_COL: columns to replace
        df = pd.read_csv(config.filename)
        df = replace_vals(df, x_col, y_col, group_col)
        # write the modified frame back out to a CSV file
        data_frames_to_csv(df, libmag.make_out_path())


if __name__ == "__main__":
    print("Starting MagellanMapper data-frame tasks...")
    # parse CLI arguments to populate config before running the tasks
    cli.main(True)
    main()
Example no. 4
0
    def detect_sub_roi(cls, coord, offset, last_coord, denoise_max_shape,
                       exclude_border, sub_roi, img_path=None):
        """Perform 3D blob detection within a sub-ROI without accessing
        class attributes, such as for spawned multiprocessing.
        
        Args:
            coord (Tuple[int]): Coordinate of the sub-ROI in the order z,y,x.
            offset (Tuple[int]): Offset of the sub-ROI within the full ROI,
                in z,y,x.
            last_coord (:obj:`np.ndarray`): See attributes.
            denoise_max_shape (Tuple[int]): See attributes.
            exclude_border (bool): See attributes.
            sub_roi (:obj:`np.ndarray`): Array in which to perform detections.
            img_path (str): Path from which to load metadata; defaults to None.
                If given, the command line arguments will be reloaded to
                set up the image and processing parameters.
        
        Returns:
            Tuple[int], :obj:`np.ndarray`: The coordinate given back again to
            identify the sub-ROI position and an array of detected blobs.

        """
        if img_path:
            # reload command-line parameters and image metadata, which is
            # required if run from a spawned (not forked) process
            cli.main(True, True)
            _, orig_info = importer.make_filenames(img_path)
            importer.load_metadata(orig_info)
        print("detecting blobs in sub-ROI at {} of {}, offset {}, shape {}..."
              .format(coord, last_coord, tuple(offset.astype(int)),
                      sub_roi.shape))
        
        if denoise_max_shape is not None:
            # further split sub-ROI for preprocessing locally
            denoise_roi_slices, _ = chunking.stack_splitter(
                sub_roi.shape, denoise_max_shape)
            # iterate the z,y,x grid of sub-sub-ROIs produced by the splitter
            for z in range(denoise_roi_slices.shape[0]):
                for y in range(denoise_roi_slices.shape[1]):
                    for x in range(denoise_roi_slices.shape[2]):
                        denoise_coord = (z, y, x)
                        denoise_roi = sub_roi[denoise_roi_slices[denoise_coord]]
                        libmag.printv_format(
                            "preprocessing sub-sub-ROI {} of {} (shape {}"
                            " within sub-ROI shape {})", 
                            (denoise_coord,
                             np.subtract(denoise_roi_slices.shape, 1),
                             denoise_roi.shape, sub_roi.shape))
                        # saturate, then denoise each chunk on the configured
                        # channel(s) before detection
                        denoise_roi = plot_3d.saturate_roi(
                            denoise_roi, channel=config.channel)
                        denoise_roi = plot_3d.denoise_roi(
                            denoise_roi, channel=config.channel)
                        # replace slices with denoised ROI
                        denoise_roi_slices[denoise_coord] = denoise_roi
            
            # re-merge into one large ROI (the image stack) in preparation for 
            # segmenting with differently sized chunks, typically larger 
            # to minimize the number of sub-ROIs and edge overlaps
            merged_shape = chunking.get_split_stack_total_shape(denoise_roi_slices)
            merged = np.zeros(
                tuple(merged_shape), dtype=denoise_roi_slices[0, 0, 0].dtype)
            chunking.merge_split_stack2(denoise_roi_slices, None, 0, merged)
            sub_roi = merged
        
        if exclude_border is None:
            exclude = None
        else:
            # 2-row exclusion array: row 0 for the leading edge, row 1 for
            # the trailing edge; no border exclusion where this sub-ROI sits
            # at the start (coord == 0) or end (coord == last_coord) of the
            # full ROI along an axis
            exclude = np.array([exclude_border, exclude_border])
            exclude[0, np.equal(coord, 0)] = 0
            exclude[1, np.equal(coord, last_coord)] = 0
        segments = detector.detect_blobs(sub_roi, config.channel, exclude)
        #print("segs before (offset: {}):\n{}".format(offset, segments))
        if segments is not None:
            # shift both coordinate sets (at beginning and end of array) to 
            # absolute positioning, using the latter set to store shifted 
            # coordinates based on duplicates and the former for initial 
            # positions to check for multiple duplicates
            detector.shift_blob_rel_coords(segments, offset)
            detector.shift_blob_abs_coords(segments, offset)
            #print("segs after:\n{}".format(segments))
        return coord, segments
        # NOTE(review): this fragment begins mid-function; its enclosing
        # definition (and the `meta`, `path`, `prefix_npy`, `prefix_orig`,
        # `chls`, `tiles` bindings) is outside the visible chunk — confirm
        # against the full file.
        # store the zoom level, stripping the trailing "x" (e.g. "2x" -> 2.0)
        config.meta_dict[config.MetaKeys.ZOOM] = float(meta["Zoom"].strip("x"))

        # import RAW file, overwriting the same NPY file
        config.filename = path
        config.prefix = prefix_npy
        # config.filename = str(f"{pathlib.Path(path).parent}_out/export")
        cli.process_proc_tasks()

        # construct output filename, assuming input filename is in the format,
        # `<chl>_<tile-coords>.raw` and output is to tile_<t>_ch_<c>.tif
        path_split = os.path.basename(path).split("_", 1)
        metas = []
        # map each path component to its index in the running channel/tile
        # lists, appending unseen values so indices stay stable across files
        for i, (meta_str,
                meta_list) in enumerate(zip(path_split, (chls, tiles))):
            if meta_str not in meta_list:
                meta_list.append(meta_str)
            metas.append(meta_list.index(meta_str))
        filename_out = f"tile_{metas[1]}_ch_{metas[0]}"

        # export imported file to TIF file
        print(f"Exporting file from '{path}' to '{filename_out}'")
        # restore the original prefix before writing the TIF
        config.prefix = prefix_orig
        np_io.write_tif(config.image5d,
                        pathlib.Path(path).parent / filename_out,
                        imagej=True)


if __name__ == "__main__":
    # parse CLI arguments only (no processing tasks), then run this
    # module's main entry point
    cli.main(process_args_only=True)
    main()