Example #1
# Module-level imports assumed by this snippet (the original listing omits them);
# ij_converter is a helper module that ships with the plugin this function comes from.
import logging
from pathlib import Path

import imagej
import jpype
import numpy as np
import scyjava
from bfio import BioReader, BioWriter

import ij_converter  # plugin-local helper for Python <-> Java conversions

logger = logging.getLogger(__name__)


def main(
    _opName: str,
    _in1: Path,
    _sigma: str,
    _calibration: str,
    _out: Path,
) -> None:
    """Initialize ImageJ"""

    # Bioformats throws a debug message, disable the loci debugger to mute it
    def disable_loci_logs():
        DebugTools = scyjava.jimport("loci.common.DebugTools")
        DebugTools.setRootLevel("WARN")

    scyjava.when_jvm_starts(disable_loci_logs)

    # This is the version of ImageJ pre-downloaded into the docker container
    logger.info("Starting ImageJ...")
    ij = imagej.init("sc.fiji:fiji:2.1.1+net.imagej:imagej-legacy:0.37.4",
                     headless=True)
    # ij_converter.ij = ij
    logger.info("Loaded ImageJ version: {}".format(ij.getVersion()))
    """ Validate and organize the inputs """
    args = []
    argument_types = []
    arg_len = 0

    # Validate opName
    opName_values = [
        "DefaultTubeness",
    ]
    if _opName not in opName_values:
        raise ValueError("opName must be one of {}".format(opName_values))

    # Validate in1
    in1_types = {
        "DefaultTubeness": "RandomAccessibleInterval",
    }

    # Check that all inputs are specified
    if _in1 is None and _opName in in1_types:
        raise ValueError("{} must be defined to run {}.".format(
            "in1", _opName))
    elif _in1 is not None:
        in1_type = in1_types[_opName]

        # switch to images folder if present
        if _in1.joinpath("images").is_dir():
            _in1 = _in1.joinpath("images").absolute()

        args.append([f for f in _in1.iterdir() if f.is_file()])
        arg_len = len(args[-1])
    else:
        argument_types.append(None)
        args.append([None])

    # Validate sigma
    sigma_types = {
        "DefaultTubeness": "double",
    }

    # Check that all inputs are specified
    if _sigma is None and _opName in sigma_types:
        raise ValueError("{} must be defined to run {}.".format(
            "sigma", _opName))
    else:
        sigma = None

    # Validate calibration
    calibration_types = {
        "DefaultTubeness": "double[]",
    }

    # Check that all inputs are specified
    if _calibration is None and _opName in calibration_types:
        raise ValueError("{} must be defined to run {}.".format(
            "calibration", _opName))
    else:
        calibration = None

    # Broadcast any single-value arguments so they match the number of input images
    for i in range(len(args)):
        if len(args[i]) == 1:
            args[i] = args[i] * arg_len

    # Set up the output
    out_types = {
        "DefaultTubeness": "IterableInterval",
    }
    """ Run the plugin """
    try:
        for (in1_path,) in zip(*args):
            if in1_path is not None:

                # Load the first plane of image in in1 collection
                logger.info("Processing image: {}".format(in1_path))
                in1_br = BioReader(in1_path)

                # Convert to appropriate numpy array
                in1 = ij_converter.to_java(ij,
                                           np.squeeze(in1_br[:, :, 0:1, 0, 0]),
                                           in1_type)
                metadata = in1_br.metadata
                fname = in1_path.name
                dtype = ij.py.dtype(in1)
            if _sigma is not None:
                sigma = ij_converter.to_java(ij, _sigma, sigma_types[_opName],
                                             dtype)

            if _calibration is not None:
                calibration = ij_converter.to_java(ij, _calibration,
                                                   calibration_types[_opName],
                                                   dtype)

            logger.info("Running op...")
            if _opName == "DefaultTubeness":
                out = ij.op().filter().tubeness(in1, sigma, calibration)

            logger.info("Completed op!")
            if in1_path is not None:
                in1_br.close()

            # Saving output file to out
            logger.info("Saving...")
            out_array = ij_converter.from_java(ij, out, out_types[_opName])
            bw = BioWriter(_out.joinpath(fname), metadata=metadata)
            bw.Z = 1
            bw.dtype = out_array.dtype
            bw[:] = out_array.astype(bw.dtype)
            bw.close()

    except Exception:
        logger.error("There was an error, shutting down jvm before raising...")
        raise

    finally:
        # Exit the program
        logger.info("Shutting down jvm...")
        del ij
        jpype.shutdownJVM()
        logger.info("Complete!")
Example #2
# Module-level imports assumed by this snippet (the original listing omits them);
# flow_thread and TILE_SIZE are defined elsewhere in the plugin module.
import logging
from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor, wait
from multiprocessing import cpu_count
from pathlib import Path

import filepattern
import numpy as np
import torch
from bfio import BioReader, BioWriter

logger = logging.getLogger(__name__)


def main(inpDir: Path, outDir: Path, filePattern: str = None) -> None:
    """ Turn labels into flow fields.

    Args:
        inpDir: Path to the input directory
        outDir: Path to the output directory
        filePattern: Optional filepattern used to select a subset of input files
    """

    # Use a gpu if it's available
    use_gpu = torch.cuda.is_available()
    if use_gpu:
        dev = torch.device("cuda")
    else:
        dev = torch.device("cpu")
    logger.info(f'Running on: {dev}')

    # Determine the number of threads to run on
    num_threads = max(cpu_count() // 2, 1)
    logger.info(f'Number of threads: {num_threads}')

    # Get all file names in inpDir image collection based on input pattern
    if filePattern:
        fp = filepattern.FilePattern(inpDir, filePattern)
        inpDir_files = [file[0]['file'].name for file in fp()]
        logger.info(
            f'Processing {len(inpDir_files)} labels based on filepattern')
    else:
        inpDir_files = [f.name for f in Path(inpDir).iterdir() if f.is_file()]

    # Loop through files in inpDir image collection and process
    processes = []

    if use_gpu:
        executor = ThreadPoolExecutor(num_threads)
    else:
        executor = ProcessPoolExecutor(num_threads)

    for f in inpDir_files:
        br = BioReader(Path(inpDir).joinpath(f).absolute())
        out_file = Path(outDir).joinpath(
            f.replace('.ome', '_flow.ome').replace('.tif',
                                                   '.zarr')).absolute()
        bw = BioWriter(out_file, metadata=br.metadata)
        bw.C = 4
        bw.dtype = np.float32
        bw.channel_names = ['cell_probability', 'x', 'y', 'labels']

        # Initialize the output zarr file on disk before the workers start writing tiles to it
        bw._backend._init_writer()

        for z in range(br.Z):
            for x in range(0, br.X, TILE_SIZE):
                for y in range(0, br.Y, TILE_SIZE):
                    processes.append(
                        executor.submit(flow_thread,
                                        Path(inpDir).joinpath(f).absolute(),
                                        out_file, use_gpu, dev, x, y, z))
        bw.close()
        br.close()

    done, not_done = wait(processes, 0)

    logger.info(f'Percent complete: {100 * len(done) / len(processes):6.3f}%')

    while len(not_done) > 0:
        for r in done:
            r.result()
        done, not_done = wait(processes, 5)
        logger.info(
            f'Percent complete: {100 * len(done) / len(processes):6.3f}%')

    # Surface any exceptions raised by futures that finished after the last check
    for r in done:
        r.result()

    executor.shutdown()
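
The executor selection and tiled submission logic above can be read in isolation from the bio-imaging specifics. Below is a minimal sketch of the same pattern with a stand-in worker; process_tile, run_tiled, and TILE are hypothetical names (flow_thread and TILE_SIZE play those roles in the plugin), and the GPU flag only decides between a thread pool and a process pool:

from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor, wait
from multiprocessing import cpu_count

TILE = 1024  # stand-in for the plugin's TILE_SIZE


def process_tile(x: int, y: int, z: int) -> None:
    """Hypothetical worker; flow_thread plays this role in the plugin."""
    pass


def run_tiled(shape_xyz, use_gpu: bool) -> None:
    # Threads when a GPU does the heavy lifting, processes for CPU-bound work
    num_workers = max(cpu_count() // 2, 1)
    executor = (ThreadPoolExecutor(num_workers) if use_gpu
                else ProcessPoolExecutor(num_workers))

    # One future per (x, y, z) tile
    X, Y, Z = shape_xyz
    futures = [
        executor.submit(process_tile, x, y, z)
        for z in range(Z)
        for x in range(0, X, TILE)
        for y in range(0, Y, TILE)
    ]

    # Poll for progress, then surface any worker exceptions
    done, not_done = wait(futures, timeout=5)
    while not_done:
        print(f'{100 * len(done) / len(futures):6.3f}% complete')
        done, not_done = wait(futures, timeout=5)
    for f in done:
        f.result()

    executor.shutdown()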