Example #1
def main():
    with BiaflowsJob.from_cli(sys.argv[1:]) as nj:
        nj.job.update(status=Job.RUNNING,
                      progress=0,
                      statusComment="Initialisation...")

        # 1. Create working directories on the machine:
        # 2. Download (or read) data
        problem_cls = get_discipline(nj, default=CLASS_SPTCNT)
        is_2d = False
        in_images, gt_images, in_path, gt_path, out_path, tmp_path = prepare_data(
            problem_cls, nj, is_2d=is_2d, **nj.flags)

        # 3. Execute workflow
        scale = nj.parameters.icy_scale
        sensitivity = nj.parameters.icy_sensitivity

        nj.job.update(progress=25, statusComment="Launching workflow...")
        # Modify the parameters in the job.xml file
        replaceScaleParameterCommand = "sed -i '/scale/c\\t<parameter name=\"scale\">{}</parameter>' job.xml".format(
            scale)
        replaceSensitivityParameterCommand = "sed -i '/sensitivity/c\\t<parameter name=\"sensitivity\">{}</parameter>' job.xml".format(
            sensitivity)
        replaceInputFolderParameterCommand = "sed -i '/inputFolder/c\\t<parameter name=\"inputFolder\">{}</parameter>' job.xml".format(
            '"' + in_path + '"')
        replaceOutputFolderParameterCommand = "sed -i '/outputFolder/c\\t<parameter name=\"outputFolder\">{}</parameter>' job.xml".format(
            '"' + out_path + '"')
        call(replaceScaleParameterCommand, shell=True, cwd="/icy")
        call(replaceSensitivityParameterCommand, shell=True, cwd="/icy")
        call(replaceInputFolderParameterCommand, shell=True, cwd="/icy")
        call(replaceOutputFolderParameterCommand, shell=True, cwd="/icy")

        # Run script in ICY

        command = "java -jar icy.jar --headless --execute plugins.volker.commandlinescriptrunner.CommandLineScriptRunner"
        call(command, shell=True, cwd="/icy")

        # 4. Upload the annotation and labels to Cytomine
        upload_data(problem_cls,
                    nj,
                    in_images,
                    out_path,
                    **nj.flags,
                    is_2d=is_2d,
                    monitor_params={
                        "start": 60,
                        "end": 90,
                        "period": 0.1
                    })

        # 5. Compute and upload the metrics
        nj.job.update(progress=90,
                      statusComment="Computing and uploading metrics...")
        upload_metrics(problem_cls, nj, in_images, gt_path, out_path, tmp_path,
                       **nj.flags)

        nj.job.update(progress=100,
                      status=Job.TERMINATED,
                      statusComment="Finished.")
Example #2
def main():
    with BiaflowsJob.from_cli(sys.argv[1:]) as nj:
        nj.job.update(status=Job.RUNNING,
                      progress=0,
                      statusComment="Initialization...")

        # 1. Create working directories on the machine:
        # 2. Download (or read) data
        problem_cls = get_discipline(nj, default=CLASS_SPTCNT)
        is_2d = True
        nj.job.update(
            progress=1,
            statusComment="Execute workflow on problem class '{}'".format(
                problem_cls))
        in_images, gt_images, in_path, gt_path, out_path, tmp_path = prepare_data(
            problem_cls, nj, is_2d=is_2d, **nj.flags)

        # 3. Execute workflow
        scale3sens = nj.parameters.icy_scale3sensitivity
        nj.job.update(progress=25, statusComment="Launching workflow...")
        call("java -cp /icy/lib/ -jar /icy/icy.jar -hl",
             shell=True,
             cwd="/icy")
        call(
            "java -cp /icy/lib/ -jar /icy/icy.jar -hl -x plugins.adufour.protocols.Protocols "
            "protocol=\"/icy/protocols/protocol.protocol\" inputFolder=\"{}\" outputFolder=\"{}\" extension=tif "
            "scale2enable=true scale2sensitivity={}".format(
                in_path, out_path, scale3sens),
            shell=True,
            cwd="/icy")

        # 3.5 Remove the xml-output files
        for p in Path(out_path).glob("*.xml"):
            p.unlink()

        # 4. Upload the annotation and labels to Cytomine
        upload_data(problem_cls,
                    nj,
                    in_images,
                    out_path,
                    **nj.flags,
                    is_2d=is_2d,
                    monitor_params={
                        "start": 60,
                        "end": 90,
                        "period": 0.1
                    })

        # 5. Compute and upload the metrics
        nj.job.update(progress=90,
                      statusComment="Computing and uploading metrics...")
        upload_metrics(problem_cls, nj, in_images, gt_path, out_path, tmp_path,
                       **nj.flags)

        nj.job.update(progress=100,
                      status=Job.TERMINATED,
                      statusComment="Finished.")
Example #3
def main(argv):
    # 0. Initialize Cytomine client and job if necessary and parse inputs
    with BiaflowsJob.from_cli(argv) as nj:
        problem_cls = get_discipline(nj, default=CLASS_OBJTRK)
        is_2d = False
        nj.job.update(
            status=Job.RUNNING,
            progress=0,
            statusComment="Running workflow for problem class '{}'".format(
                problem_cls))

        # 1. Create working directories on the machine
        # 2. Download the images
        nj.job.update(progress=0, statusComment="Initialisation...")
        in_images, gt_images, in_path, gt_path, out_path, tmp_path = prepare_data(
            problem_cls, nj, **nj.flags)

        # 3. Call the image analysis workflow using the run script
        nj.job.update(progress=25, statusComment="Launching workflow...")
        command = "/usr/bin/xvfb-run java -Xmx6000m -cp /fiji/jars/ij.jar ij.ImageJ --headless --console " \
                  "-batch macro.ijm \"input={}, output={}, medrad={}, thr={}, erodrad={}, dmapds={}, noisetol={}\""\
                    .format(in_path, out_path, nj.parameters.medrad, nj.parameters.threshold,
                            nj.parameters.erodrad, nj.parameters.dmapds, nj.parameters.noisetol)

        return_code = call(command, shell=True,
                           cwd="/fiji")  # waits for the subprocess to return

        if return_code != 0:
            err_desc = "Failed to execute the ImageJ macro (return code: {})".format(
                return_code)
            nj.job.update(progress=50, statusComment=err_desc)
            raise ValueError(err_desc)

        # 4. Upload the annotation and labels to Cytomine
        upload_data(problem_cls,
                    nj,
                    in_images,
                    out_path,
                    **nj.flags,
                    is_2d=is_2d,
                    monitor_params={
                        "start": 60,
                        "end": 90,
                        "period": 0.1
                    })

        # 5. Compute and upload the metrics
        nj.job.update(progress=90,
                      statusComment="Computing and uploading metrics...")
        upload_metrics(problem_cls, nj, in_images, gt_path, out_path, tmp_path,
                       **nj.flags)

        # 6. End
        nj.job.update(status=Job.TERMINATED,
                      progress=100,
                      statusComment="Finished.")
Example #4
def main(argv):
    base_path = "{}".format(os.getenv("HOME"))  # Mandatory for Singularity

    with BiaflowsJob.from_cli(argv) as bj:
        # Change following to the actual problem class of the workflow
        problem_cls = get_discipline(bj, default=CLASS_OBJSEG)

        bj.job.update(status=Job.RUNNING,
                      progress=0,
                      statusComment="Initialisation...")

        # 1. Prepare data for workflow
        in_imgs, gt_imgs, in_path, gt_path, out_path, tmp_path = prepare_data(
            problem_cls, bj, is_2d=True, **bj.flags)

        # 2. Run image analysis workflow
        bj.job.update(progress=25, statusComment="Launching workflow...")

        # Add here the code for running the analysis script

        # 3. Upload data to BIAFLOWS
        upload_data(problem_cls,
                    bj,
                    in_imgs,
                    out_path,
                    **bj.flags,
                    monitor_params={
                        "start": 60,
                        "end": 90,
                        "period": 0.1,
                        "prefix":
                        "Extracting and uploading polygons from masks"
                    })

        # 4. Compute and upload metrics
        bj.job.update(progress=90,
                      statusComment="Computing and uploading metrics...")
        upload_metrics(problem_cls, bj, in_imgs, gt_path, out_path, tmp_path,
                       **bj.flags)

        # 5. Pipeline finished
        bj.job.update(progress=100,
                      status=Job.TERMINATED,
                      statusComment="Finished.")
Example #5
def main(argv):
    with BiaflowsJob.from_cli(argv) as nj:
        problem_cls = get_discipline(nj, default=CLASS_PIXCLA)
        is_2d = True
        print(nj.parameters)

        nj.job.update(status=Job.RUNNING, progress=0, statusComment="Initialisation...")
        in_images, gt_images, in_path, gt_path, out_path, tmp_path = prepare_data(problem_cls, nj, **nj.flags)

        # 2. Call the image analysis workflow
        nj.job.update(progress=10, statusComment="Load model...")
        net = load_model("/app/CP58_dice_0.9373_loss_0.0265.pth")

        for in_image in nj.monitor(in_images, start=20, end=75, period=0.05, prefix="Apply UNet to input images"):
            img = imread(in_image.filepath, is_2d=is_2d)

            mask = predict_img(
                net=net, full_img=img,
                scale_factor=0.5,  # value used at training
                out_threshold=nj.parameters.threshold
            )

            imwrite(
                path=os.path.join(out_path, in_image.filename),
                image=mask.astype(np.uint8),
                is_2d=is_2d
            )

        # 4. Create and upload annotations
        nj.job.update(progress=70, statusComment="Uploading extracted annotation...")
        upload_data(problem_cls, nj, in_images, out_path, **nj.flags, is_2d=is_2d, monitor_params={
            "start": 70, "end": 90, "period": 0.1
        })

        # 5. Compute and upload the metrics
        nj.job.update(progress=90, statusComment="Computing and uploading metrics (if necessary)...")
        upload_metrics(problem_cls, nj, in_images, gt_path, out_path, tmp_path, **nj.flags)

        # 6. End the job
        nj.job.update(status=Job.TERMINATED, progress=100, statusComment="Finished.")
Example #6
def main(argv):
    # 0. Initialize Cytomine client and job if necessary and parse inputs
    with BiaflowsJob.from_cli(argv) as nj:
        problem_cls = get_discipline(nj, default=CLASS_OBJTRK)
        is_2d = False
        nj.job.update(
            status=Job.RUNNING,
            progress=0,
            statusComment="Running workflow for problem class '{}'".format(
                problem_cls))

        # 1. Prepare data for workflow
        in_imgs, gt_imgs, in_path, gt_path, out_path, tmp_path = prepare_data(
            problem_cls, nj, **nj.flags)

        # 2. Run image analysis workflow
        nj.job.update(progress=25, statusComment="Launching workflow...")

        for in_img in in_imgs:

            # convert the image data to the Cell Tracking Challenge format
            img = io.imread(in_img.filepath)
            T = img.shape[0]
            Y = img.shape[1]
            X = img.shape[2]
            img_data = img.ravel()
            index = 0
            offset = Y * X
            for t in range(T):
                io.imsave(os.path.join(tmp_path, 't{0:03d}.tif'.format(t)),
                          img_data[index:index + offset].reshape((Y, X)))
                index += offset

            # do segmentation and tracking
            process_dataset(tmp_path, tmp_path, '/app/model.h5')

            # convert the tracking results to the required format
            index = 0
            res_img = np.zeros((T, Y, X), np.uint16)
            res_data = res_img.ravel()
            for t in range(T):
                res = io.imread(
                    os.path.join(tmp_path, 'mask{0:03d}.tif'.format(t)))
                res_data[index:index + offset] = res.ravel()
                index += offset
            io.imsave(os.path.join(out_path, in_img.filename), res_img)
            os.rename(
                os.path.join(tmp_path, 'res_track.txt'),
                os.path.join(out_path, in_img.filename_no_extension + '.txt'))

        # 4. Upload the annotation and labels to Cytomine
        upload_data(problem_cls,
                    nj,
                    in_imgs,
                    out_path,
                    **nj.flags,
                    is_2d=is_2d,
                    monitor_params={
                        "start": 60,
                        "end": 90,
                        "period": 0.1
                    })

        # 5. Compute and upload the metrics
        nj.job.update(progress=90,
                      statusComment="Computing and uploading metrics...")
        upload_metrics(problem_cls, nj, in_imgs, gt_path, out_path, tmp_path,
                       **nj.flags)

        # 6. End
        nj.job.update(status=Job.TERMINATED,
                      progress=100,
                      statusComment="Finished.")
Example #7
def main(argv):
    base_path = "{}".format(os.getenv("HOME")) # Mandatory for Singularity
    with BiaflowsJob.from_cli(argv) as bj:
        # Change following to the actual problem class of the workflow
        problem_cls = get_discipline(bj, default=CLASS_OBJSEG)
        
        bj.job.update(status=Job.RUNNING, progress=0, statusComment="Initialisation...")
        
        # 1. Prepare data for workflow
        in_imgs, gt_imgs, in_path, gt_path, out_path, tmp_path = prepare_data(problem_cls, bj, is_2d=True, **bj.flags)

        # Make sure each image is square and at least 224x224 pixels:
        # pad the bottom/right with zeros if needed (cropped back after Cellpose).
        # nuc_channel: 0 = grayscale (RGB input is converted to grayscale),
        # 1, 2, 3 = use that RGB channel (passed to Cellpose via --chan).
        nuc_channel = bj.parameters.nuc_channel
        resized = {}
        for bfimg in in_imgs:
            fn = os.path.join(in_path, bfimg.filename)
            img = imageio.imread(fn)
            if len(img.shape) > 2 and nuc_channel == 0:
                gray_rgb = False
                if np.array_equal(img[:,:,0],img[:,:,1]) and np.array_equal(img[:,:,0],img[:,:,2]):
                    gray_rgb = True
                img = skimage.color.rgb2gray(img) * 255
                img = img.astype(np.uint8)
                # Invert intensity if the RGB image is not actually grayscale,
                # i.e. assume an H&E-stained image with dark nuclei
                if not gray_rgb:
                    img = np.invert(img)
            minshape = min(img.shape[:2])
            maxshape = max(img.shape[:2])
            if minshape != maxshape or minshape < 224:
                resized[bfimg.filename] = img.shape
                padshape = []
                for i in range(2):
                    if img.shape[i] < max(224,maxshape):
                        padshape.append((0,max(224,maxshape)-img.shape[i]))
                    else:
                        padshape.append((0,0))
                if len(img.shape) == 3:
                    padshape.append((0,0))
                img = np.pad(img, padshape, 'constant', constant_values=0)
            imageio.imwrite(os.path.join(tmp_path, bfimg.filename), img)

        # 2. Run image analysis workflow
        bj.job.update(progress=25, statusComment="Launching workflow...")

        # Run Cellpose on the (possibly padded) images in tmp_path
        cmd = ["python", "-m", "cellpose",
               "--dir", tmp_path,
               "--pretrained_model", "nuclei",
               "--save_tif", "--no_npy",
               "--chan", "{:d}".format(nuc_channel),
               "--diameter", "{:f}".format(bj.parameters.diameter),
               "--cellprob_threshold", "{:f}".format(bj.parameters.prob_threshold)]
        status = subprocess.run(cmd)

        if status.returncode != 0:
            print("Running Cellpose failed, terminate")
            sys.exit(1)

        # Crop to original shape
        for bimg in in_imgs:
            shape = resized.get(bimg.filename, None)
            if shape:
                img = imageio.imread(os.path.join(tmp_path,bimg.filename_no_extension+"_cp_masks.tif"))
                img = img[0:shape[0], 0:shape[1]]
                imageio.imwrite(os.path.join(out_path,bimg.filename), img)
            else:
                shutil.copy(os.path.join(tmp_path,bimg.filename_no_extension+"_cp_masks.tif"), os.path.join(out_path,bimg.filename))
        
        # 3. Upload data to BIAFLOWS
        upload_data(problem_cls, bj, in_imgs, out_path, **bj.flags, monitor_params={
            "start": 60, "end": 90, "period": 0.1,
            "prefix": "Extracting and uploading polygons from masks"})
        
        # 4. Compute and upload metrics
        bj.job.update(progress=90, statusComment="Computing and uploading metrics...")
        upload_metrics(problem_cls, bj, in_imgs, gt_path, out_path, tmp_path, **bj.flags)

        # 5. Pipeline finished
        bj.job.update(progress=100, status=Job.TERMINATED, statusComment="Finished.")
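
The pre-processing step above pads each image with zeros on the bottom/right until it is square and at least 224 pixels per side, and the post-processing crop restores the original shape. A minimal sketch of the same padding rule on a toy array:

# Illustrative sketch of the padding rule used above, on a toy 200x300 image.
import numpy as np

img = np.ones((200, 300), np.uint8)
target = max(224, max(img.shape[:2]))              # -> 300
padshape = [(0, max(0, target - s)) for s in img.shape[:2]]
padded = np.pad(img, padshape, 'constant', constant_values=0)
print(padded.shape)                                # (300, 300)
cropped = padded[0:img.shape[0], 0:img.shape[1]]   # back to (200, 300)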