Example #1
def main(argv):
    base_path = "{}".format(os.getenv("HOME")) # Mandatory for Singularity
    problem_cls = CLASS_OBJSEG

    with BiaflowsJob.from_cli(argv) as bj:
        bj.job.update(status=Job.RUNNING, progress=0, statusComment="Initialisation...")
        
        # 1. Prepare data for workflow
        in_imgs, gt_imgs, in_path, gt_path, out_path, tmp_path = prepare_data(problem_cls, bj, is_2d=True, **bj.flags)

        # 2. Run image analysis workflow
        bj.job.update(progress=25, statusComment="Launching workflow...")
        shArgs = ["python", "/app/deepcell_script.py", in_path, tmp_path, out_path, str(bj.parameters.nuclei_min_size), str(bj.parameters.boundary_weight)]
        return_code = call(" ".join(shArgs), shell=True, cwd="/app/DeepCell/keras_version")

        if return_code != 0:
            err_desc = "Failed to execute the DeepCell script (return code: {})".format(return_code)
            bj.job.update(progress=50, statusComment=err_desc)
            raise ValueError(err_desc)

        # 3. Upload data to BIAFLOWS
        upload_data(problem_cls, bj, in_imgs, out_path, **bj.flags, monitor_params={
            "start": 60, "end": 90, "period": 0.1,
            "prefix": "Extracting and uploading polygons from masks"})
        
        # 4. Compute and upload metrics
        bj.job.update(progress=90, statusComment="Computing and uploading metrics...")
        upload_metrics(problem_cls, bj, in_imgs, gt_path, out_path, tmp_path, **bj.flags)

        # 5. Pipeline finished
        bj.job.update(progress=100, status=Job.TERMINATED, statusComment="Finished.")
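Note: Example #1 joins shArgs into a single string and runs it with shell=True. A minimal sketch of an equivalent call that avoids the shell (same variables as above):

import subprocess

# Equivalent invocation without shell=True: the argument list is passed
# directly, so no shell quoting or word splitting is involved.
return_code = subprocess.call(shArgs, cwd="/app/DeepCell/keras_version")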
Example #2
def main(argv):
    # 0. Initialize Cytomine client and job if necessary and parse inputs
    with BiaflowsJob.from_cli(argv) as nj:
        nj.job.update(status=Job.RUNNING, progress=0, statusComment="Initialisation...")

        problem_cls = CLASS_OBJSEG
        is_2d = True

        # 1. Create working directories on the machine
        # 2. Download the images
        in_images, gt_images, in_path, gt_path, out_path, tmp_path = prepare_data(problem_cls, nj, **nj.flags)

        # 3. Call the image analysis workflow using the run script
        nj.job.update(progress=25, statusComment="Launching workflow...")
        command = "/usr/bin/xvfb-run java -Xmx1000m -cp /fiji/jars/ij.jar ij.ImageJ --headless --console " \
                  "-macro macro.ijm \"input={}, output={}, radius={}, threshold={}\"".format(in_path, out_path, nj.parameters.ij_radius, nj.parameters.ij_threshold)
        return_code = call(command, shell=True, cwd="/fiji")  # waits for the subprocess to return

        if return_code != 0:
            err_desc = "Failed to execute the ImageJ macro (return code: {})".format(return_code)
            nj.job.update(progress=50, statusComment=err_desc)
            raise ValueError(err_desc)

        # 4. Upload the annotation and labels to Cytomine
        upload_data(problem_cls, nj, in_images, out_path, **nj.flags, is_2d=is_2d, monitor_params={
            "start": 60, "end": 90, "period": 0.1
        })

        # 5. Compute and upload the metrics
        nj.job.update(progress=90, statusComment="Computing and uploading metrics...")
        upload_metrics(problem_cls, nj, in_images, gt_path, out_path, tmp_path, **nj.flags)

        # 6. End
        nj.job.update(status=Job.TERMINATED, progress=100, statusComment="Finished.")
Example #3
def main(argv):
    # 0. Initialize Cytomine client and job
    with BiaflowsJob.from_cli(argv) as nj:
        nj.job.update(status=Job.RUNNING,
                      progress=0,
                      statusComment="Initialisation...")
        problem_cls = CLASS_TRETRC
        is_2d = False

        # 1. Create working directories on the machine
        # 2. Download the images
        in_images, gt_images, in_path, gt_path, out_path, tmp_path = prepare_data(
            problem_cls, nj, is_2d=is_2d, **nj.flags)

        # 3. Call the image analysis workflow using the run script
        nj.job.update(progress=25, statusComment="Launching workflow...")
        workflow(in_images, out_path)

        # workflow() does not return an exit code, so no success check is
        # performed here; list the produced files for debugging instead.
        print('files in out_path ' + out_path + ': ')
        for file in glob.glob(out_path + '/*'):
            print(file)

        # 4. Upload the annotation and labels to Cytomine (annotations are extracted from the mask using
        # the AnnotationExporter module)
        upload_data(problem_cls,
                    nj,
                    in_images,
                    out_path,
                    **nj.flags,
                    projection=-1,
                    is_2d=is_2d,
                    monitor_params={
                        "start": 60,
                        "end": 90,
                        "period": 0.1,
                        "prefix":
                        "Extracting and uploading polygons from masks"
                    })

        # 5. Compute and upload the metrics
        nj.job.update(
            progress=80,
            statusComment="Computing and uploading metrics (if necessary)...")
        upload_metrics(problem_cls, nj, in_images, gt_path, out_path, tmp_path,
                       **nj.flags)
        nj.job.update(status=Job.TERMINATED,
                      progress=100,
                      statusComment="Finished.")
Example #4
def main():
    with BiaflowsJob.from_cli(sys.argv[1:]) as nj:
        nj.job.update(status=Job.RUNNING,
                      progress=0,
                      statusComment="Initialisation...")

        # 1. Create working directories on the machine:
        # 2. Download (or read) data
        problem_cls = get_discipline(nj, default=CLASS_SPTCNT)
        is_2d = False
        in_images, gt_images, in_path, gt_path, out_path, tmp_path = prepare_data(
            problem_cls, nj, is_2d=False, **nj.flags)

        # 3. Execute workflow
        scale = nj.parameters.icy_scale
        sensitivity = nj.parameters.icy_sensitivity

        nj.job.update(progress=25, statusComment="Launching workflow...")
        # Modify the parameters in the job.xml file
        replaceScaleParameterCommand = "sed -i '/scale/c\\t<parameter name=\"scale\">{}</parameter>' job.xml".format(
            scale)
        replaceSensitivityParameterCommand = "sed -i '/sensitivity/c\\t<parameter name=\"sensitivity\">{}</parameter>' job.xml".format(
            sensitivity)
        replaceInputFolderParameterCommand = "sed -i '/inputFolder/c\\t<parameter name=\"inputFolder\">{}</parameter>' job.xml".format(
            '"' + in_path + '"')
        replaceOutputFolderParameterCommand = "sed -i '/outputFolder/c\\t<parameter name=\"outputFolder\">{}</parameter>' job.xml".format(
            '"' + out_path + '"')
        call(replaceScaleParameterCommand, shell=True, cwd="/icy")
        call(replaceSensitivityParameterCommand, shell=True, cwd="/icy")
        call(replaceInputFolderParameterCommand, shell=True, cwd="/icy")
        call(replaceOutputFolderParameterCommand, shell=True, cwd="/icy")

        # Run script in ICY

        command = "java -jar icy.jar --headless --execute plugins.volker.commandlinescriptrunner.CommandLineScriptRunner"
        call(command, shell=True, cwd="/icy")

        # 4. Upload the annotation and labels to Cytomine
        upload_data(problem_cls,
                    nj,
                    in_images,
                    out_path,
                    **nj.flags,
                    is_2d=is_2d,
                    monitor_params={
                        "start": 60,
                        "end": 90,
                        "period": 0.1
                    })

        # 5. Compute and upload the metrics
        nj.job.update(progress=90,
                      statusComment="Computing and uploading metrics...")
        upload_metrics(problem_cls, nj, in_images, gt_path, out_path, tmp_path,
                       **nj.flags)

        nj.job.update(progress=100,
                      status=Job.TERMINATED,
                      statusComment="Finished.")
Example #5
def main():
    with BiaflowsJob.from_cli(sys.argv[1:]) as nj:
        nj.job.update(status=Job.RUNNING,
                      progress=0,
                      statusComment="Initialization...")

        # 1. Create working directories on the machine:
        # 2. Download (or read) data
        problem_cls = get_discipline(nj, default=CLASS_SPTCNT)
        is_2d = True
        nj.job.update(
            progress=1,
            statusComment="Execute workflow on problem class '{}'".format(
                problem_cls))
        in_images, gt_images, in_path, gt_path, out_path, tmp_path = prepare_data(
            problem_cls, nj, is_2d=is_2d, **nj.flags)

        # 3. Execute workflow
        scale3sens = nj.parameters.icy_scale3sensitivity
        nj.job.update(progress=25, statusComment="Launching workflow...")
        call("java -cp /icy/lib/ -jar /icy/icy.jar -hl",
             shell=True,
             cwd="/icy")
        call(
            "java -cp /icy/lib/ -jar /icy/icy.jar -hl -x plugins.adufour.protocols.Protocols "
            "protocol=\"/icy/protocols/protocol.protocol\" inputFolder=\"{}\" outputFolder=\"{}\" extension=tif "
            "scale2enable=true scale2sensitivity={}".format(
                in_path, out_path, scale3sens),
            shell=True,
            cwd="/icy")

        # 3.5 Remove the xml-output files
        for p in Path(out_path).glob("*.xml"):
            p.unlink()

        # 4. Upload the annotation and labels to Cytomine
        upload_data(problem_cls,
                    nj,
                    in_images,
                    out_path,
                    **nj.flags,
                    is_2d=is_2d,
                    monitor_params={
                        "start": 60,
                        "end": 90,
                        "period": 0.1
                    })

        # 5. Compute and upload the metrics
        nj.job.update(progress=90,
                      statusComment="Computing and uploading metrics...")
        upload_metrics(problem_cls, nj, in_images, gt_path, out_path, tmp_path,
                       **nj.flags)

        nj.job.update(progress=100,
                      status=Job.TERMINATED,
                      statusComment="Finished.")
Example #6
def main(argv):
    # 0. Initialize Cytomine client and job if necessary and parse inputs
    with BiaflowsJob.from_cli(argv) as nj:
        nj.job.update(status=Job.RUNNING,
                      progress=0,
                      statusComment="Initialisation...")

        problem_cls = CLASS_OBJSEG
        is_2d = True

        # 1. Create working directories on the machine
        # 2. Download the images
        in_images, gt_images, in_path, gt_path, out_path, tmp_path = prepare_data(
            problem_cls, nj, is_2d=is_2d, **nj.flags)

        # 3. Call the image analysis workflow using the run script
        nj.job.update(progress=25, statusComment="Launching workflow...")
        command = "python script.py --infld {} --outfld {} --blurad {} --radthr {} --intthr {} --spltnt {} --minsize {}".format(
            in_path, out_path, nj.parameters.blurad, nj.parameters.radthr,
            nj.parameters.intthr, nj.parameters.spltnt, nj.parameters.minsize)
        return_code = call(command, shell=True,
                           cwd="/app")  # waits for the subprocess to return

        if return_code != 0:
            err_desc = "Failed to execute the Python script (return code: {})".format(
                return_code)
            nj.job.update(progress=50, statusComment=err_desc)
            raise ValueError(err_desc)

        # 4. Upload the annotation and labels to Cytomine
        upload_data(problem_cls,
                    nj,
                    in_images,
                    out_path,
                    **nj.flags,
                    is_2d=is_2d,
                    monitor_params={
                        "start": 60,
                        "end": 90,
                        "period": 0.1
                    })

        # 5. Compute and upload the metrics
        nj.job.update(progress=90,
                      statusComment="Computing and uploading metrics...")
        upload_metrics(problem_cls, nj, in_images, gt_path, out_path, tmp_path,
                       **nj.flags)

        # 6. End
        nj.job.update(status=Job.TERMINATED,
                      progress=100,
                      statusComment="Finished.")
Example #7
def main(argv):
    with BiaflowsJob.from_cli(argv) as nj:
        problem_cls = CLASS_PRTTRK
        is_2d = False

        nj.job.update(status=Job.RUNNING,
                      progress=0,
                      statusComment="Initialisation...")
        in_images, gt_images, in_path, gt_path, out_path, tmp_path = prepare_data(
            problem_cls, nj, is_2d=is_2d, **nj.flags)

        # 2. Call the image analysis workflow
        nj.job.update(progress=25, statusComment="Launching workflow...")
        command = "/usr/bin/xvfb-run ./ImageJ-linux64 -macro macro.ijm " \
                  "\"input={}, output={}, laprad={}, thr={}, maxlnkdst={}\" -batch".format(
            in_path, out_path, nj.parameters.laprad, nj.parameters.thr, nj.parameters.maxlnkdst)

        return_code = call(command, shell=True,
                           cwd="/fiji")  # waits for the subprocess to return

        if return_code != 0:
            err_desc = "Failed to execute the ImageJ macro (return code: {})".format(
                return_code)
            nj.job.update(progress=100, statusComment=err_desc)
            raise ValueError(err_desc)

        # 4. Create and upload annotations
        nj.job.update(progress=70,
                      statusComment="Uploading extracted annotation...")
        upload_data(problem_cls,
                    nj,
                    in_images,
                    out_path,
                    **nj.flags,
                    is_2d=is_2d,
                    monitor_params={
                        "start": 70,
                        "end": 90,
                        "period": 0.1
                    })

        # 5. Compute and upload the metrics
        nj.job.update(
            progress=90,
            statusComment="Computing and uploading metrics (if necessary)...")
        upload_metrics(problem_cls, nj, in_images, gt_path, out_path, tmp_path,
                       **nj.flags)

        # 6. End the job
        nj.job.update(status=Job.TERMINATED,
                      progress=100,
                      statusComment="Finished.")
Example #8
def main(argv):
    base_path = "{}".format(os.getenv("HOME")) # Mandatory for Singularity
    problem_cls = CLASS_OBJSEG

    with BiaflowsJob.from_cli(argv) as nj:
        nj.job.update(status=Job.RUNNING, progress=0, statusComment="Initialisation...")
        # 1. Prepare data for workflow
        in_imgs, gt_imgs, in_path, gt_path, out_path, tmp_path = prepare_data(problem_cls, nj, **nj.flags)

        temp_img = skimage.io.imread(os.path.join(in_path,"{}".format(in_imgs[0].filename)))
        classification_project = "/app/PixelClassification3D.ilp"

        # 2. Run ilastik prediction
        nj.job.update(progress=25, statusComment="Launching workflow...")
        shArgs = [
            "/app/ilastik/run_ilastik.sh",
            "--headless",
            "--project="+classification_project,
            "--export_source=Probabilities",
            "--output_format='multipage tiff'",
            '--output_filename_format='+os.path.join(tmp_path,'{nickname}.tiff')
            ]
        shArgs += [image.filepath for image in in_imgs]
        
        call_return = call(" ".join(shArgs), shell=True)

        if call_return != 0:
            err_desc = "Failed to execute ilastik (return code: {})".format(call_return)
            nj.job.update(progress=50, statusComment=err_desc)
            raise ValueError(err_desc)

        # Threshold probabilities
        for image in in_imgs:
            fn = os.path.join(tmp_path,"{}.tiff".format(image.filename[:-4]))
            outfn = os.path.join(out_path,"{}".format(image.filename))
            img = skimage.io.imread(fn)
            img = label_objects(img, nj.parameters.probability_threshold)
            skimage.io.imsave(outfn, img)

        # 3. Upload data to Cytomine
        upload_data(problem_cls, nj, in_imgs, out_path, is_2d=False, **nj.flags, monitor_params={
            "start": 60, "end": 90, "period": 0.1,
            "prefix": "Extracting and uploading polygons from masks"})
        
        # 4. Compute and upload metrics
        nj.job.update(progress=90, statusComment="Computing and uploading metrics...")
        upload_metrics(problem_cls, nj, in_imgs, gt_path, out_path, tmp_path, **nj.flags)

        # 5. Pipeline finished
        nj.job.update(progress=100, status=Job.TERMINATED, statusComment="Finished.")
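The label_objects helper used in Example #8 is not shown. A hypothetical sketch of such a step, assuming the first channel of the exported ilastik probability map holds the foreground probability:

import numpy as np
from skimage.measure import label

def label_objects(prob_img, threshold):
    # Hypothetical sketch: threshold the (assumed) foreground-probability
    # channel and label the resulting connected components.
    fg = prob_img[..., 0] if prob_img.ndim == 4 else prob_img
    mask = fg > threshold
    return label(mask).astype(np.uint16)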
Example #9
def main(argv):
    base_path = "{}".format(os.getenv("HOME"))  # Mandatory for Singularity

    with BiaflowsJob.from_cli(argv) as bj:
        # Change following to the actual problem class of the workflow
        problem_cls = get_discipline(bj, default=CLASS_OBJSEG)

        bj.job.update(status=Job.RUNNING,
                      progress=0,
                      statusComment="Initialisation...")

        # 1. Prepare data for workflow
        in_imgs, gt_imgs, in_path, gt_path, out_path, tmp_path = prepare_data(
            problem_cls, bj, is_2d=True, **bj.flags)

        # 2. Run image analysis workflow
        bj.job.update(progress=25, statusComment="Launching workflow...")

        # Add here the code for running the analysis script

        # 3. Upload data to BIAFLOWS
        upload_data(problem_cls,
                    bj,
                    in_imgs,
                    out_path,
                    **bj.flags,
                    monitor_params={
                        "start": 60,
                        "end": 90,
                        "period": 0.1,
                        "prefix":
                        "Extracting and uploading polygons from masks"
                    })

        # 4. Compute and upload metrics
        bj.job.update(progress=90,
                      statusComment="Computing and uploading metrics...")
        upload_metrics(problem_cls, bj, in_imgs, gt_path, out_path, tmp_path,
                       **bj.flags)

        # 5. Pipeline finished
        bj.job.update(progress=100,
                      status=Job.TERMINATED,
                      statusComment="Finished.")
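Example #9 is the bare BIAFLOWS template: step 2 is intentionally left empty. Following the pattern of the other examples in this listing, the placeholder would typically hold something like the sketch below (script path and error message are illustrative only):

from subprocess import call

# Illustrative only: launch the analysis script and check its exit status,
# as the other examples in this listing do.
command = "python /app/script.py --input {} --output {}".format(in_path, out_path)
return_code = call(command, shell=True)
if return_code != 0:
    err_desc = "Failed to execute the workflow (return code: {})".format(return_code)
    bj.job.update(progress=50, statusComment=err_desc)
    raise ValueError(err_desc)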
Example #10
def main(argv):
    # 0. Initialize Cytomine client and job
    with BiaflowsJob.from_cli(argv) as nj:
        nj.job.update(status=Job.RUNNING, progress=0, statusComment="Initialisation...")

        problem_cls = CLASS_LOOTRC
        is_2d = False

        # 1. Create working directories on the machine
        # 2. Download the images
        in_images, gt_images, in_path, gt_path, out_path, tmp_path = prepare_data(problem_cls, nj,
                                                                                  **nj.flags)

        # 3. Call the image analysis workflow using the run script
        nj.job.update(progress=25, statusComment="Launching workflow...")
        command = "/usr/bin/xvfb-run java -Xmx6000m -cp /fiji/jars/ij.jar ij.ImageJ --headless --console " \
                  "-macro macro.ijm \"input={}, output={}, gblur={}, rad={}, thr={}\"".format(in_path, out_path, nj.parameters.gblur, nj.parameters.rad, nj.parameters.thr)
        return_code = call(command, shell=True, cwd="/fiji")  # waits for the subprocess to return

        if return_code != 0:
            err_desc = "Failed to execute the ImageJ macro (return code: {})".format(return_code)
            nj.job.update(progress=50, statusComment=err_desc)
            raise ValueError(err_desc)

        # 4. Upload the annotation and labels to Cytomine (annotations are extracted from the mask using
        # the AnnotationExporter module)
        upload_data(problem_cls, nj, in_images, out_path, **nj.flags, is_2d=is_2d, projection=-1, monitor_params={
            "start": 60, "end": 90, "period": 0.1
        })

        # 5. Compute and upload the metrics
        nj.job.update(progress=80, statusComment="Computing and uploading metrics (if necessary)...")
        upload_metrics(
            problem_cls,
            nj, in_images,
            gt_path, out_path, tmp_path,
            metric_params={
                "gating_dist": 5
                # ... put any metric specific parameters here
            },
            **nj.flags
        )

        nj.job.update(status=Job.TERMINATED, progress=100, statusComment="Finished.")
Example #11
def main(argv):
    with BiaflowsJob.from_cli(argv) as nj:
        problem_cls = get_discipline(nj, default=CLASS_PIXCLA)
        is_2d = True
        print(nj.parameters)

        nj.job.update(status=Job.RUNNING, progress=0, statusComment="Initialisation...")
        in_images, gt_images, in_path, gt_path, out_path, tmp_path = prepare_data(problem_cls, nj, **nj.flags)

        # 2. Call the image analysis workflow
        nj.job.update(progress=10, statusComment="Load model...")
        net = load_model("/app/CP58_dice_0.9373_loss_0.0265.pth")

        for in_image in nj.monitor(in_images, start=20, end=75, period=0.05, prefix="Apply UNet to input images"):
            img = imread(in_image.filepath, is_2d=is_2d)

            mask = predict_img(
                net=net, full_img=img,
                scale_factor=0.5,  # value used at training
                out_threshold=nj.parameters.threshold
            )

            imwrite(
                path=os.path.join(out_path, in_image.filename),
                image=mask.astype(np.uint8),
                is_2d=is_2d
            )

        # 4. Create and upload annotations
        nj.job.update(progress=70, statusComment="Uploading extracted annotation...")
        upload_data(problem_cls, nj, in_images, out_path, **nj.flags, is_2d=is_2d, monitor_params={
            "start": 70, "end": 90, "period": 0.1
        })

        # 5. Compute and upload the metrics
        nj.job.update(progress=90, statusComment="Computing and uploading metrics (if necessary)...")
        upload_metrics(problem_cls, nj, in_images, gt_path, out_path, tmp_path, **nj.flags)

        # 6. End the job
        nj.job.update(status=Job.TERMINATED, progress=100, statusComment="Finished.")
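Neither load_model nor predict_img is shown in Example #11. A hypothetical sketch of the loader, assuming a PyTorch U-Net (the UNet class and its constructor arguments are assumptions):

import torch
from unet import UNet  # hypothetical module providing the network definition

def load_model(checkpoint_path):
    # Hypothetical sketch: build the network, restore the checkpoint weights
    # on CPU, and switch to inference mode.
    net = UNet(n_channels=1, n_classes=1)
    net.load_state_dict(torch.load(checkpoint_path, map_location="cpu"))
    net.eval()
    return net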
Example #12
def main(argv):
    base_path = "{}".format(os.getenv("HOME"))  # Mandatory for Singularity
    problem_cls = CLASS_OBJSEG

    with BiaflowsJob.from_cli(argv) as nj:
        nj.job.update(status=Job.RUNNING,
                      progress=0,
                      statusComment="Initialisation...")

        # 1. Prepare data for workflow
        in_imgs, gt_imgs, in_path, gt_path, out_path, tmp_path = prepare_data(
            problem_cls, nj, is_2d=True, **nj.flags)

        # 2. Run image analysis workflow
        nj.job.update(progress=25, statusComment="Launching workflow...")

        model = utils.model_builder.get_model_3_class(
            unet_utils.IMAGE_SIZE[0], unet_utils.IMAGE_SIZE[1],
            channels=3)  # Create the model
        model.load_weights('/app/weights.h5')

        dataset = Dataset()
        for img in in_imgs:
            image_id = img.filename_no_extension
            tiles = dataset.load_image(image_id, img.filepath, TILE_OVERLAP)
            # orig_size = dataset.get_orig_size(image_id)
            # mask_img = np.zeros(orig_size, dtype=np.uint8)

            tile_stack = np.zeros((len(tiles), unet_utils.IMAGE_SIZE[0],
                                   unet_utils.IMAGE_SIZE[1], 3))
            for i, tile in enumerate(tiles):
                tile_stack[i, :, :, :] = tile
            tile_stack = tile_stack / 255

            tile_masks = model.predict(tile_stack, batch_size=1)

            probmap = dataset.merge_tiles(image_id,
                                          tile_masks,
                                          tile_overlap=TILE_OVERLAP)
            labelimg = label_image(probmap, nj.parameters.boundary_weight,
                                   nj.parameters.nuclei_min_size)
            skimage.io.imsave(os.path.join(out_path, img.filename), labelimg)

        # 3. Upload data to BIAFLOWS
        upload_data(problem_cls,
                    nj,
                    in_imgs,
                    out_path,
                    **nj.flags,
                    monitor_params={
                        "start": 60,
                        "end": 90,
                        "period": 0.1,
                        "prefix":
                        "Extracting and uploading polygons from masks"
                    })

        # 4. Compute and upload metrics
        nj.job.update(progress=90,
                      statusComment="Computing and uploading metrics...")
        upload_metrics(problem_cls, nj, in_imgs, gt_path, out_path, tmp_path,
                       **nj.flags)

        # 5. Pipeline finished
        nj.job.update(progress=100,
                      status=Job.TERMINATED,
                      statusComment="Finished.")
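The label_image post-processing helper used in Example #12 is not shown. A hypothetical sketch, assuming the 3-class U-Net emits background / interior / boundary channels in that order:

import numpy as np
from skimage.measure import label
from skimage.morphology import remove_small_objects

def label_image(probmap, boundary_weight, nuclei_min_size):
    # Hypothetical sketch: down-weight the boundary probability to separate
    # touching nuclei, threshold, drop small objects, label what remains.
    fg = probmap[..., 1] - boundary_weight * probmap[..., 2]
    mask = remove_small_objects(fg > 0.5, min_size=nuclei_min_size)
    return label(mask).astype(np.uint16)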
Example #13
def main(argv):
    # 0. Initialize Cytomine client and job if necessary and parse inputs
    with BiaflowsJob.from_cli(argv) as nj:
        problem_cls = get_discipline(nj, default=CLASS_OBJTRK)
        is_2d = False
        nj.job.update(
            status=Job.RUNNING,
            progress=0,
            statusComment="Running workflow for problem class '{}'".format(
                problem_cls))

        # 1. Prepare data for workflow
        in_imgs, gt_imgs, in_path, gt_path, out_path, tmp_path = prepare_data(
            problem_cls, nj, **nj.flags)

        # 2. Run image analysis workflow
        nj.job.update(progress=25, statusComment="Launching workflow...")

        for in_img in in_imgs:

            # convert the image data to the Cell Tracking Challenge format
            img = io.imread(in_img.filepath)
            T = img.shape[0]
            Y = img.shape[1]
            X = img.shape[2]
            img_data = img.ravel()
            index = 0
            offset = Y * X
            for t in range(T):
                io.imsave(os.path.join(tmp_path, 't{0:03d}.tif'.format(t)),
                          img_data[index:index + offset].reshape((Y, X)))
                index += offset

            # do segmentation and tracking
            process_dataset(tmp_path, tmp_path, '/app/model.h5')

            # convert the tracking results to the required format
            index = 0
            res_img = np.zeros((T, Y, X), np.uint16)
            res_data = res_img.ravel()
            for t in range(T):
                res = io.imread(
                    os.path.join(tmp_path, 'mask{0:03d}.tif'.format(t)))
                res_data[index:index + offset] = res.ravel()
                index += offset
            io.imsave(os.path.join(out_path, in_img.filename), res_img)
            os.rename(
                os.path.join(tmp_path, 'res_track.txt'),
                os.path.join(out_path, in_img.filename_no_extension + '.txt'))

        # 4. Upload the annotation and labels to Cytomine
        upload_data(problem_cls,
                    nj,
                    in_imgs,
                    out_path,
                    **nj.flags,
                    is_2d=is_2d,
                    monitor_params={
                        "start": 60,
                        "end": 90,
                        "period": 0.1
                    })

        # 5. Compute and upload the metrics
        nj.job.update(progress=90,
                      statusComment="Computing and uploading metrics...")
        upload_metrics(problem_cls, nj, in_imgs, gt_path, out_path, tmp_path,
                       **nj.flags)

        # 6. End
        nj.job.update(status=Job.TERMINATED,
                      progress=100,
                      statusComment="Finished.")
Example #14
def main(argv):
    base_path = "{}".format(os.getenv("HOME"))  # Mandatory for Singularity
    problem_cls = CLASS_OBJSEG

    with BiaflowsJob.from_cli(argv) as bj:
        bj.job.update(status=Job.RUNNING,
                      progress=0,
                      statusComment="Initialisation...")
        # 1. Prepare data for workflow
        in_imgs, gt_imgs, in_path, gt_path, out_path, tmp_path = prepare_data(
            problem_cls, bj, is_2d=True, **bj.flags)
        files = [image.filepath for image in in_imgs]

        # 2. Run Mask R-CNN prediction
        bj.job.update(progress=25, statusComment="Launching workflow...")

        model_dir = "/app"
        dataset = Dataset()
        dataset.load_files(files)
        dataset.prepare()
        inference_config = InferenceConfig()
        model = modellib.MaskRCNN(mode="inference",
                                  config=inference_config,
                                  model_dir=model_dir)
        model.load_weights(os.path.join(model_dir, 'weights.h5'), by_name=True)

        for i, image_id in enumerate(dataset.image_ids):
            tiles = dataset.load_image(image_id,
                                       bj.parameters.nuclei_major_axis)
            tile_masks = []
            for image in tiles:
                mask = model.detect([image], verbose=0)[0]
                tile_masks.append(mask)

            mask_img = dataset.merge_tiles(image_id, tile_masks)
            skimage.io.imsave(
                os.path.join(out_path, os.path.basename(files[i])), mask_img)

        # 3. Upload data to BIAFLOWS
        upload_data(problem_cls,
                    bj,
                    in_imgs,
                    out_path,
                    **bj.flags,
                    monitor_params={
                        "start": 60,
                        "end": 90,
                        "period": 0.1,
                        "prefix":
                        "Extracting and uploading polygons from masks"
                    })

        # 4. Compute and upload metrics
        bj.job.update(progress=90,
                      statusComment="Computing and uploading metrics...")
        upload_metrics(problem_cls, bj, in_imgs, gt_path, out_path, tmp_path,
                       **bj.flags)

        # 5. Pipeline finished
        bj.job.update(progress=100,
                      status=Job.TERMINATED,
                      statusComment="Finished.")
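The InferenceConfig class instantiated in Example #14 is not shown. In the Matterport Mask R-CNN API it is conventionally a small Config subclass; a hypothetical sketch (the values are assumptions):

from mrcnn.config import Config  # Matterport Mask R-CNN

class InferenceConfig(Config):
    # Hypothetical values: inference on one image at a time, with a single
    # foreground class (nucleus) plus background.
    NAME = "nuclei"
    GPU_COUNT = 1
    IMAGES_PER_GPU = 1
    NUM_CLASSES = 1 + 1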
Example #15
def main(argv):
    base_path = "{}".format(os.getenv("HOME")) # Mandatory for Singularity
    with BiaflowsJob.from_cli(argv) as bj:
        # Change following to the actual problem class of the workflow
        problem_cls = get_discipline(bj, default=CLASS_OBJSEG)
        
        bj.job.update(status=Job.RUNNING, progress=0, statusComment="Initialisation...")
        
        # 1. Prepare data for workflow
        in_imgs, gt_imgs, in_path, gt_path, out_path, tmp_path = prepare_data(problem_cls, bj, is_2d=True, **bj.flags)

        # Make sure all images have at least 224x224 dimensions
        # and that minshape / maxshape * minshape >= 224
        # 0 = Grayscale (if input RGB, convert to grayscale)
        # 1,2,3 = rgb channel
        nuc_channel = bj.parameters.nuc_channel
        resized = {}
        for bfimg in in_imgs:
            fn = os.path.join(in_path, bfimg.filename)
            img = imageio.imread(fn)
            if len(img.shape) > 2 and nuc_channel == 0:
                gray_rgb = False
                if np.array_equal(img[:,:,0],img[:,:,1]) and np.array_equal(img[:,:,0],img[:,:,2]):
                    gray_rgb = True
                img = skimage.color.rgb2gray(img) * 255
                img = img.astype(np.uint8)
                # Invert intensity if the RGB image is not grayscale, i.e. we
                # expect an H&E stained image with dark nuclei
                if not gray_rgb:
                    img = np.invert(img)
            minshape = min(img.shape[:2])
            maxshape = max(img.shape[:2])
            if minshape != maxshape or minshape < 224:
                resized[bfimg.filename] = img.shape
                padshape = []
                for i in range(2):
                    if img.shape[i] < max(224,maxshape):
                        padshape.append((0,max(224,maxshape)-img.shape[i]))
                    else:
                        padshape.append((0,0))
                if len(img.shape) == 3:
                    padshape.append((0,0))
                img = np.pad(img, padshape, 'constant', constant_values=0)
            imageio.imwrite(os.path.join(tmp_path, bfimg.filename), img)

        # 2. Run image analysis workflow
        bj.job.update(progress=25, statusComment="Launching workflow...")

        # Run Cellpose on the preprocessed images
        cmd = [
            "python", "-m", "cellpose",
            "--dir", tmp_path,
            "--pretrained_model", "nuclei",
            "--save_tif", "--no_npy",
            "--chan", "{:d}".format(nuc_channel),
            "--diameter", "{:f}".format(bj.parameters.diameter),
            "--cellprob_threshold", "{:f}".format(bj.parameters.prob_threshold)
        ]
        status = subprocess.run(cmd)

        if status.returncode != 0:
            print("Running Cellpose failed, terminate")
            sys.exit(1)

        # Crop to original shape
        for bimg in in_imgs:
            shape = resized.get(bimg.filename, None)
            if shape:
                img = imageio.imread(os.path.join(tmp_path,bimg.filename_no_extension+"_cp_masks.tif"))
                img = img[0:shape[0], 0:shape[1]]
                imageio.imwrite(os.path.join(out_path,bimg.filename), img)
            else:
                shutil.copy(os.path.join(tmp_path,bimg.filename_no_extension+"_cp_masks.tif"), os.path.join(out_path,bimg.filename))
        
        # 3. Upload data to BIAFLOWS
        upload_data(problem_cls, bj, in_imgs, out_path, **bj.flags, monitor_params={
            "start": 60, "end": 90, "period": 0.1,
            "prefix": "Extracting and uploading polygons from masks"})
        
        # 4. Compute and upload metrics
        bj.job.update(progress=90, statusComment="Computing and uploading metrics...")
        upload_metrics(problem_cls, bj, in_imgs, gt_path, out_path, tmp_path, **bj.flags)

        # 5. Pipeline finished
        bj.job.update(progress=100, status=Job.TERMINATED, statusComment="Finished.")
Example #16
def main(argv):
    base_path = "{}".format(os.getenv("HOME"))
    problem_cls = CLASS_OBJSEG

    with BiaflowsJob.from_cli(argv) as bj:
        bj.job.update(status=Job.RUNNING,
                      progress=0,
                      statusComment="Initialization...")

        # 1. Prepare data for workflow
        in_imgs, gt_imgs, in_path, gt_path, out_path, tmp_path = prepare_data(
            problem_cls, bj, is_2d=True, **bj.flags)
        list_imgs = [image.filepath for image in in_imgs]

        # 2. Run Stardist model on input images
        bj.job.update(progress=25, statusComment="Launching workflow...")

        # Load the pre-trained StarDist models (fluorescence and H&E)
        np.random.seed(17)

        lbl_cmap = random_label_cmap()
        model_fluo = StarDist2D(None,
                                name='2D_versatile_fluo',
                                basedir='/models/')
        model_he = StarDist2D(None, name='2D_versatile_he', basedir='/models/')

        # Go over images
        for img_path in list_imgs:
            fluo = True
            img = imageio.imread(img_path)
            n_channel = 3 if img.ndim == 3 else 1

            if n_channel == 3:
                # Check if 3-channel grayscale image or actually an RGB image
                if np.array_equal(img[:, :, 0],
                                  img[:, :, 1]) and np.array_equal(
                                      img[:, :, 0], img[:, :, 2]):
                    img = skimage.color.rgb2gray(img)
                else:
                    fluo = False

            # normalize channels independently (0,1,2) normalize channels jointly (0,1)
            axis_norm = (0, 1)
            img = normalize(img,
                            bj.parameters.stardist_norm_perc_low,
                            bj.parameters.stardist_norm_perc_high,
                            axis=axis_norm)

            # StarDist model prediction with thresholds
            if fluo:
                labels, details = model_fluo.predict_instances(
                    img,
                    prob_thresh=bj.parameters.stardist_prob_t,
                    nms_thresh=bj.parameters.stardist_nms_t)
            else:
                labels, details = model_he.predict_instances(
                    img,
                    prob_thresh=bj.parameters.stardist_prob_t,
                    nms_thresh=bj.parameters.stardist_nms_t)

            # Convert labels to uint16 for BIAFLOWS
            labels = labels.astype(np.uint16)
            imageio.imwrite(os.path.join(out_path, os.path.basename(img_path)),
                            labels)

        # 3. Upload data to BIAFLOWS
        upload_data(problem_cls,
                    bj,
                    in_imgs,
                    out_path,
                    **bj.flags,
                    monitor_params={
                        "start": 60,
                        "end": 90,
                        "period": 0.1,
                        "prefix":
                        "Extracting and uploading polygons from masks"
                    })

        # 4. Compute and upload metrics
        bj.job.update(progress=90,
                      statusComment="Computing and uploading metrics...")
        upload_metrics(problem_cls, bj, in_imgs, gt_path, out_path, tmp_path,
                       **bj.flags)

        # 5. Pipeline finished
        bj.job.update(progress=100,
                      status=Job.TERMINATED,
                      statusComment="Finished.")
Example #17
def main(argv):
    base_path = "{}".format(os.getenv("HOME"))  # Mandatory for Singularity
    problem_cls = CLASS_OBJSEG

    with BiaflowsJob.from_cli(argv) as bj:
        bj.job.update(status=Job.RUNNING,
                      progress=0,
                      statusComment="Initialisation...")
        # 1. Prepare data for workflow
        in_imgs, gt_imgs, in_path, gt_path, out_path, tmp_path = prepare_data(
            problem_cls, bj, is_2d=True, **bj.flags)

        plugindir = "/app/plugins"
        pipeline = "/app/CP_detect_nuclei.cppipe"
        file_list = os.path.join(tmp_path, "file_list.txt")
        with open(file_list, "w") as fh:
            for image in in_imgs:
                fh.write(image.filepath + "\n")

        # 2. Run CellProfiler pipeline
        bj.job.update(progress=25, statusComment="Launching workflow...")
        mod_pipeline = parseCPparam(bj, pipeline, tmp_path)

        shArgs = [
            "python", "/app/CellProfiler/CellProfiler.py", "-c", "-r", "-b",
            "--do-not-fetch", "-p", mod_pipeline, "-i", in_path, "-o",
            out_path, "-t", tmp_path, "--plugins-directory", plugindir,
            "--file-list", file_list
        ]
        return_code = call(" ".join(shArgs),
                           shell=True,
                           cwd="/app/CellProfiler")

        if return_code != 0:
            err_desc = "Failed to execute the CellProfiler pipeline (return code: {})".format(
                return_code)
            bj.job.update(progress=50, statusComment=err_desc)
            raise ValueError(err_desc)

        # 3. Upload data to BIAFLOWS
        upload_data(problem_cls,
                    bj,
                    in_imgs,
                    out_path,
                    **bj.flags,
                    monitor_params={
                        "start": 60,
                        "end": 90,
                        "period": 0.1,
                        "prefix":
                        "Extracting and uploading polygons from masks"
                    })

        # 4. Compute and upload metrics
        bj.job.update(progress=90,
                      statusComment="Computing and uploading metrics...")
        upload_metrics(problem_cls, bj, in_imgs, gt_path, out_path, tmp_path,
                       **bj.flags)

        # 5. Pipeline finished
        bj.job.update(progress=100,
                      status=Job.TERMINATED,
                      statusComment="Finished.")
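The parseCPparam helper used in Example #17 is not shown. A hypothetical sketch, assuming it copies the .cppipe file to tmp_path while substituting job parameters into matching setting lines, and that bj.parameters behaves like an argparse Namespace:

import os

def parseCPparam(bj, pipeline, tmp_path):
    # Hypothetical sketch: rewrite any 'SettingName:value' line whose name
    # matches a job parameter, leaving all other lines untouched.
    params = vars(bj.parameters)
    mod_pipeline = os.path.join(tmp_path, os.path.basename(pipeline))
    with open(pipeline) as src, open(mod_pipeline, "w") as dst:
        for line in src:
            key = line.split(":", 1)[0].strip()
            if key in params:
                line = "{}:{}\n".format(key, params[key])
            dst.write(line)
    return mod_pipeline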