def main():
    with NeubiasJob.from_cli(sys.argv[1:]) as nj:
        nj.job.update(status=Job.RUNNING, progress=0, statusComment="Initialization...")

        # 1. Create working directories on the machine
        # 2. Download (or read) data
        problem_cls = get_discipline(nj, default=CLASS_SPTCNT)
        is_2d = True
        nj.job.update(progress=1, statusComment="Execute workflow on problem class '{}'".format(problem_cls))
        in_images, gt_images, in_path, gt_path, out_path, tmp_path = prepare_data(problem_cls, nj, is_2d=is_2d, **nj.flags)

        # 3. Execute workflow
        scale3sens = nj.parameters.icy_scale3sensitivity
        nj.job.update(progress=25, statusComment="Launching workflow...")
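        # Launch Icy once in headless mode (presumably to initialize the headless environment and
        # plugins) before running the detection protocol. Note that the 'icy_scale3sensitivity'
        # parameter is injected into the protocol's 'scale2sensitivity' field below.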
        call("java -cp /icy/lib/ -jar /icy/icy.jar -hl", shell=True, cwd="/icy")
        call("java -cp /icy/lib/ -jar /icy/icy.jar -hl -x plugins.adufour.protocols.Protocols "
             "protocol=\"/icy/protocols/protocol.protocol\" inputFolder=\"{}\" outputFolder=\"{}\" extension=tif "
             "scale2enable=true scale2sensitivity={}".format(in_path, out_path, scale3sens), shell=True, cwd="/icy")

        # 3.5 Remove the xml-output files
        for p in Path(out_path).glob("*.xml"):
            p.unlink()
        
        # 4. Upload the annotation and labels to Cytomine
        upload_data(problem_cls, nj, in_images, out_path, **nj.flags, is_2d=is_2d, monitor_params={
           "start": 60, "end": 90, "period": 0.1
        })

        # 5. Compute and upload the metrics
        nj.job.update(progress=90, statusComment="Computing and uploading metrics...")
        upload_metrics(problem_cls, nj, in_images, gt_path, out_path, tmp_path, **nj.flags)

        nj.job.update(progress=100, status=Job.TERMINATED, statusComment="Finished.")
def main(argv):
    # 0. Initialize Cytomine client and job if necessary and parse inputs
    with NeubiasJob.from_cli(argv) as nj:
        problem_cls = get_discipline(nj, default=CLASS_SPTCNT)
        is_2d = False
        nj.job.update(status=Job.RUNNING, progress=0, statusComment="Running workflow for problem class '{}' in {}D".format(problem_cls, 2 if is_2d else 3))

        # 1. Create working directories on the machine
        # 2. Download the images
        nj.job.update(progress=0, statusComment="Initialisation...")
        in_images, gt_images, in_path, gt_path, out_path, tmp_path = prepare_data(problem_cls, nj, is_2d=is_2d, **nj.flags)

        # 3. Call the image analysis workflow using the run script
        nj.job.update(progress=25, statusComment="Launching workflow...")
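        # xvfb-run provides a virtual X display for the headless ImageJ/Fiji run; the macro
        # parameters are passed as a single comma-separated argument string parsed by macro.ijm.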
        command = "/usr/bin/xvfb-run java -Xmx6000m -cp /fiji/jars/ij.jar ij.ImageJ --headless --console " \
                  "-macro macro.ijm \"input={}, output={}, scale={}, radius_xy={}, radius_z={}, noise={}\"".format(in_path, out_path, nj.parameters.ij_scale, nj.parameters.ij_radius_xy, nj.parameters.ij_radius_z, nj.parameters.ij_noise)
        return_code = call(command, shell=True, cwd="/fiji")  # waits for the subprocess to return

        if return_code != 0:
            err_desc = "Failed to execute the ImageJ macro (return code: {})".format(return_code)
            nj.job.update(progress=50, statusComment=err_desc)
            raise ValueError(err_desc)

        # 4. Upload the annotation and labels to Cytomine
        upload_data(problem_cls, nj, in_images, out_path, **nj.flags, is_2d=is_2d, monitor_params={
           "start": 60, "end": 90, "period": 0.1
        })

        # 5. Compute and upload the metrics
        nj.job.update(progress=90, statusComment="Computing and uploading metrics...")
        upload_metrics(problem_cls, nj, in_images, gt_path, out_path, tmp_path, **nj.flags)
        
        # 6. End
        nj.job.update(status=Job.TERMINATED, progress=100, statusComment="Finished.")
def main(argv):
    with NeubiasJob.from_cli(argv) as nj:
        problem_cls = get_discipline(nj, default=CLASS_PIXCLA)
        is_2d = True

        nj.job.update(status=Job.RUNNING,
                      progress=0,
                      statusComment="Initialisation...")
        in_images, gt_images, in_path, gt_path, out_path, tmp_path = prepare_data(
            problem_cls, nj, **nj.flags)

        # 2. Call the image analysis workflow
        nj.job.update(progress=10, statusComment="Load model...")
        net = load_model("/app/model.pth")
        device = torch.device("cpu")
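        # The model is loaded once; inference then runs image by image on the CPU device.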

        for in_image in Monitor(nj,
                                in_images,
                                start=20,
                                end=75,
                                period=0.05,
                                prefix="Apply UNet to input images"):
            mask = predict_img(net,
                               in_image.filepath,
                               device=device,
                               out_threshold=nj.parameters.threshold)

            imwrite(path=os.path.join(out_path, in_image.filename),
                    image=mask.astype(np.uint8),
                    is_2d=is_2d)

        # 4. Create and upload annotations
        nj.job.update(progress=70,
                      statusComment="Uploading extracted annotation...")
        upload_data(problem_cls,
                    nj,
                    in_images,
                    out_path,
                    **nj.flags,
                    is_2d=is_2d,
                    monitor_params={
                        "start": 70,
                        "end": 90,
                        "period": 0.1
                    })

        # 5. Compute and upload the metrics
        nj.job.update(
            progress=90,
            statusComment="Computing and uploading metrics (if necessary)...")
        upload_metrics(problem_cls, nj, in_images, gt_path, out_path, tmp_path,
                       **nj.flags)

        # 6. End the job
        nj.job.update(status=Job.TERMINATED,
                      progress=100,
                      statusComment="Finished.")
def main(argv):
    # 0. Initialize Cytomine client and job if necessary and parse inputs
    with NeubiasJob.from_cli(argv) as nj:
        problem_cls = get_discipline(nj, default=CLASS_OBJTRK)
        is_2d = False
        nj.job.update(status=Job.RUNNING, progress=0, statusComment="Running workflow for problem class '{}' in 2D+t".format(problem_cls))

        # 1. Create working directories on the machine
        # 2. Download the images
        nj.job.update(progress=0, statusComment="Initialisation...")
        in_images, gt_images, in_path, gt_path, out_path, tmp_path = prepare_data(problem_cls, nj, **nj.flags)

        # 3. Call the image analysis workflow using the run script
        nj.job.update(progress=25, statusComment="Launching workflow...")

        # DEBUG Workflow not currently working !!
        # # CellTrackingChallenge expects the protocol to start with the data folder name
        # in_path_folder_name, in_folder_name = os.path.basename(os.path.dirname(in_path)), os.path.basename(in_path)
        # shutil.copyfile("/app/proto.protocol", "/app/{}-{}.protocol".format(in_path_folder_name, in_folder_name))
        # command = "java -Xmx2048m -jar icy.jar -hl -x plugins.adufour.ctc.CellTrackingChallenge {} {}".format(os.path.dirname(in_path), os.path.basename(in_path))
        # return_code = call(command, shell=True, cwd="/app")  # waits for the subprocess to return
        #
        # if return_code != 0:
        #     err_desc = "Failed to execute the ImageJ macro (return code: {})".format(return_code)
        #     nj.job.update(progress=50, statusComment=err_desc)
        #     raise ValueError(err_desc)
        #
        # # move files generated by CellTrackingChallenge into the output folder
        # res_path = in_path + "_RES"
        # for file in os.listdir(res_path):
        #     shutil.move(os.path.join(res_path, file), out_path)

        # DEBUG copy ground truth in output
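        # The '_attached' suffix of the .txt track files is stripped so the copies match the file
        # names expected downstream (assumption about the ground-truth naming convention).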
        for file in os.listdir(gt_path):
            outfile = file.replace("_attached", "") if file.endswith(".txt") else file
            shutil.copy(os.path.join(gt_path, file), os.path.join(out_path, outfile))

        # 4. Upload the annotation and labels to Cytomine
        upload_data(problem_cls, nj, in_images, out_path, **nj.flags, is_2d=is_2d, monitor_params={
            "start": 60, "end": 90, "period": 0.1
        })

        # 5. Compute and upload the metrics
        nj.job.update(progress=90, statusComment="Computing and uploading metrics...")
        upload_metrics(problem_cls, nj, in_images, gt_path, out_path, tmp_path, **nj.flags)

        # 6. End
        nj.job.update(status=Job.TERMINATED, progress=100, statusComment="Finished.")
def main(argv):
    # 0. Initialize Cytomine client and job
    with NeubiasJob.from_cli(argv) as nj:
        nj.job.update(status=Job.RUNNING, progress=0, statusComment="Initialisation...")

        problem_cls = CLASS_OBJSEG
        is_2d = False

        # 1. Create working directories on the machine
        # 2. Download the images
        in_images, gt_images, in_path, gt_path, out_path, tmp_path = prepare_data(problem_cls, nj, is_2d=is_2d, **nj.flags)

        # 3. Call the image analysis workflow using the run script
        nj.job.update(progress=25, statusComment="Launching workflow...")
        command = "/usr/bin/xvfb-run java -Xmx6000m -cp /fiji/jars/ij.jar ij.ImageJ --headless --console " \
                  "-macro macro.ijm \"input={}, output={}, radius={}, min_threshold={}\"".format(in_path, out_path, nj.parameters.ij_radius, nj.parameters.ij_min_threshold)
        return_code = call(command, shell=True, cwd="/fiji")  # waits for the subprocess to return

        if return_code != 0:
            err_desc = "Failed to execute the ImageJ macro (return code: {})".format(return_code)
            nj.job.update(progress=50, statusComment=err_desc)
            raise ValueError(err_desc)
            
        # 4. Upload the annotation and labels to Cytomine (annotations are extracted from the mask using
        # the AnnotationExporter module)
        upload_data(problem_cls, nj, in_images, out_path, **nj.flags, is_2d=is_2d, monitor_params={
            "start": 60, "end": 90,
            "period": 0.1,
            "prefix": "Extracting and uploading polygons from masks"
        })

        # 5. Compute and upload the metrics
        nj.job.update(progress=80, statusComment="Computing and uploading metrics (if necessary)...")
        upload_metrics(problem_cls, nj, in_images, gt_path, out_path, tmp_path, **nj.flags)

        nj.job.update(status=Job.TERMINATED, progress=100, statusComment="Finished.")
def main():
	with NeubiasJob.from_cli(sys.argv) as conn:
		problem_cls = get_discipline(conn, default=CLASS_LNDDET)
		conn.job.update(status=Job.RUNNING, progress=0, statusComment="Initialization of the prediction phase")
		in_images, gt_images, in_path, gt_path, out_path, tmp_path = prepare_data(problem_cls, conn, is_2d=True, **conn.flags)
		list_imgs = [int(os.path.splitext(image)[0]) for image in os.listdir(in_path) if image.endswith('.tif')]
		train_job = Job().fetch(conn.parameters.model_to_use)
		properties = PropertyCollection(train_job).fetch()
		str_terms = ""
		for prop in properties:
			if prop.fetch(key='id_terms') is not None:
				str_terms = prop.fetch(key='id_terms').value
		term_list = [int(x) for x in str_terms.split(' ')]
		attached_files = AttachedFileCollection(train_job).fetch()

		feature_file = find_by_attribute(attached_files, "filename", "features.joblib")
		feature_filepath = os.path.join(in_path, "features.joblib")
		feature_file.download(feature_filepath, override=True)
		(h2,v2,h3,v3,sq) = joblib.load(feature_filepath)

		coords_file = find_by_attribute(attached_files, "filename", "coords.joblib")
		coords_filepath = os.path.join(in_path, "coords.joblib")
		coords_file.download(coords_filepath, override=True)
		(Xc, Yc) = joblib.load(coords_filepath)
		(nims, nldms) = Xc.shape
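		# Xc/Yc are assumed to hold the training landmark coordinates, one row per image and one
		# column per landmark (nims images x nldms landmarks).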

		coords = np.zeros(2 * nldms)
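		# 'coords' stores the current landmark configuration as [all x's, then all y's],
		# initialised below from the mean training position of each landmark.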
		i = 0
		for id_term in conn.monitor(term_list, start=10, end=50, period = 0.05, prefix="Building vote maps..."):
			model_file = find_by_attribute(attached_files, "filename", "%d_model.joblib" % id_term)
			model_filepath = os.path.join(in_path, "%d_model.joblib" % id_term)
			model_file.download(model_filepath, override=True)
			clf = joblib.load(model_filepath)
			mx = np.mean(Xc[:, id_term-1])
			my = np.mean(Yc[:, id_term-1])
			coords[i] = mx
			coords[i+nldms] = my
			i+=1
			for j in list_imgs:
				print(j)
				vote_map = build_vote_map(in_path, j, clf, h2, v2, h3, v3, sq, conn.parameters.model_step)
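				# np.savez_compressed appends '.npz', so the file on disk is
				# '<image>_<term>_votemap.npy.npz' (loaded back in the post-processing step below).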
				np.savez_compressed('%d_%d_votemap.npy' % (j, id_term), vote_map)

		muP_file = find_by_attribute(attached_files, "filename", "muP.joblib")
		muP_filepath = os.path.join(in_path, "muP.joblib")
		muP_file.download(muP_filepath, override=True)
		(mu, P) = joblib.load(muP_filepath)
		(nims, nldms) = Xc.shape
		for id_img in conn.monitor(list_imgs, start=50, end=80, period = 0.05, prefix="Post-processing..."):
			probability_map = np.load('%d_%d_votemap.npy.npz' % (id_img, term_list[0]))['arr_0']
			(hpmap,wpmap) = probability_map.shape
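			# Stack the per-landmark vote maps into a single (height x width x n_terms) volume.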
			probability_volume = np.zeros((hpmap,wpmap,len(term_list)))
			probability_volume[:,:,0] = probability_map
			for i in range(1,len(term_list)):
				id_term = term_list[i]
				probability_volume[:, :, i] = np.load('%d_%d_votemap.npy.npz'%(id_img, id_term))['arr_0']
			current_R = conn.parameters.model_R_MAX
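			# Coarse-to-fine refinement: repeatedly pick the best position for each landmark within
			# radius R of its current estimate, project the result back onto the shape model (mu, P),
			# and shrink R by 'model_alpha' until it drops below R_MIN.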
			while current_R >= conn.parameters.model_R_MIN:
				coords = np.round(find_best_positions(probability_volume, coords, int(np.round(current_R)))).astype(int)
				coords = np.round(fit_shape(mu, P, coords)).astype(int)
				current_R = current_R * conn.parameters.model_alpha
			x_final = np.round(coords[:nldms])
			y_final = np.round(coords[nldms:])
			lbl_img = np.zeros((hpmap, wpmap), 'uint8')
			for i in range(nldms):
				lbl_img[int(y_final[i]), int(x_final[i])] = term_list[i]
			imwrite(path=os.path.join(out_path, '%d.tif' % id_img), image=lbl_img.astype(np.uint8), is_2d=True)

		upload_data(problem_cls, conn, in_images, out_path, **conn.flags, is_2d=True,
					monitor_params={"start": 80, "end": 90, "period": 0.1})
		conn.job.update(progress=90, statusComment="Computing and uploading metrics (if necessary)...")
		upload_metrics(problem_cls, conn, in_images, gt_path, out_path, tmp_path, **conn.flags)
		conn.job.update(status=Job.TERMINATED, progress=100, statusComment="Finished.")
def main():
	with NeubiasJob.from_cli(sys.argv) as conn:
		problem_cls = get_discipline(conn, default=CLASS_LNDDET)
		is_2d = True
		conn.job.update(status=Job.RUNNING, progress=0, statusComment="Initialization of the prediction phase")
		in_images, gt_images, in_path, gt_path, out_path, tmp_path = prepare_data(problem_cls, conn, is_2d=is_2d, **conn.flags)
		list_imgs = [int(os.path.splitext(image)[0]) for image in os.listdir(in_path) if image.endswith('.tif')]

		train_job = Job().fetch(conn.parameters.model_to_use)
		properties = PropertyCollection(train_job).fetch()
		str_terms = ""
		for prop in properties:
			if prop.fetch(key='id_terms') is not None:
				str_terms = prop.fetch(key='id_terms').value
		term_list = [int(x) for x in str_terms.split(' ')]
		attached_files = AttachedFileCollection(train_job).fetch()

		hash_pos = {}
		hash_size = {}
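		# hash_pos maps each image id to its predicted (term, x, y) landmarks; hash_size stores the
		# corresponding image dimensions.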
		for id_term in conn.monitor(term_list, start=10, end=70, period = 0.05, prefix="Finding landmarks for terms..."):
			model_file = find_by_attribute(attached_files, "filename", "%d_model.joblib"%id_term)
			model_filepath = os.path.join(in_path, "%d_model.joblib"%id_term)
			model_file.download(model_filepath, override=True)
			cov_file = find_by_attribute(attached_files, 'filename', '%d_cov.joblib'%id_term)
			cov_filepath = os.path.join(in_path, "%d_cov.joblib"%id_term)
			cov_file.download(cov_filepath, override=True)
			parameters_file = find_by_attribute(attached_files, 'filename', '%d_parameters.joblib'%id_term)
			parameters_filepath = os.path.join(in_path, '%d_parameters.joblib'%id_term)
			parameters_file.download(parameters_filepath, override=True)

			model = joblib.load(model_filepath)
			[mx, my, cm] = joblib.load(cov_filepath)
			parameters_hash = joblib.load(parameters_filepath)
			feature_parameters = None
			if parameters_hash['feature_type'] in ['haar', 'gaussian']:
				fparameters_file = find_by_attribute(attached_files, 'filename', "%d_fparameters.joblib"%id_term)
				fparameters_filepath = os.path.join(in_path, "%d_fparameters.joblib"%id_term)
				fparameters_file.download(fparameters_filepath, override=True)
				feature_parameters = joblib.load(fparameters_filepath)
			for id_img in list_imgs:
				(x, y, height, width) = searchpoint_cytomine(
					in_path, id_img, model, mx, my, cm,
					1. / (2. ** np.arange(parameters_hash['model_depth'])),
					parameters_hash['window_size'], parameters_hash['feature_type'],
					feature_parameters, 'tif', parameters_hash['model_npred'])
				if id_img not in hash_size:
					hash_size[id_img] = (height, width)
					hash_pos[id_img] = []
				hash_pos[id_img].append((id_term, x, y))
		conn.job.update(status=Job.RUNNING, progress=95, statusComment="Uploading the results...")
		for id_img in list_imgs:
			(h, w) = hash_size[id_img]
			lbl_img = np.zeros((h, w), 'uint8')
			for (id_term, x, y) in hash_pos[id_img]:
				intx = int(x)
				inty = int(y)
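				# If this pixel already carries another landmark's label, fall back to the nearest
				# still-unlabelled pixel so both landmarks stay visible in the label image.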
				if lbl_img[inty, intx] > 0:
					(ys, xs) = np.where(lbl_img==0)
					dis = np.sqrt((ys-y)**2 + (xs-x)**2)
					j = np.argmin(dis)
					intx = int(xs[j])
					inty = int(ys[j])
				lbl_img[inty, intx] = id_term
			imwrite(path=os.path.join(out_path, '%d.tif'%id_img), image=lbl_img.astype(np.uint8), is_2d=is_2d)
		upload_data(problem_cls, conn, in_images, out_path, **conn.flags, is_2d=is_2d, monitor_params={"start": 70, "end": 90, "period": 0.1})
		conn.job.update(progress=90, statusComment="Computing and uploading metrics (if necessary)...")
		upload_metrics(problem_cls, conn, in_images, gt_path, out_path, tmp_path, **conn.flags)
		conn.job.update(status=Job.TERMINATED, progress=100, statusComment="Finished.")
def main(argv):
    # 0. Initialize Cytomine client and job
    with NeubiasJob.from_cli(argv) as nj:
        nj.job.update(status=Job.RUNNING,
                      progress=0,
                      statusComment='Initialisation...')
        problem_cls = CLASS_TRETRC
        is_2d = False
        threshold_value = nj.parameters.threshold_value
        auto_downsampled = nj.parameters.auto_downsampled
        # 1. Create working directories on the machine
        # 2. Download the images
        in_images, gt_images, in_path, gt_path, out_path, tmp_path = prepare_data(
            problem_cls, nj, is_2d=is_2d, **nj.flags)

        # 3. Call the image analysis workflow using the run script
        nj.job.update(progress=25, statusComment='Launching workflow...')
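        # workflow() runs the tracing in-process (presumably wrapping the Vaa3D call referenced in
        # the commented-out check below), so there is no subprocess return code to test here.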
        workflow(in_images, out_path, threshold_value, auto_downsampled)

        # if return_code != 0:
        #     err_desc = 'Failed to execute Vaa3D (return code: {})'.format(return_code)
        #     nj.job.update(progress=50, statusComment=err_desc)
        #     raise ValueError(err_desc)
        print('files in out_path ' + out_path + ': ')
        for file in glob.glob(out_path + '/*'):
            print(file)

        #files = (glob.glob(in_path+'/*.tif'))
        #print('Removing flipped images...')
        #for i in range(0,len(files)):
        #    files[i] = files[i].replace('/in/','/out/')
        #    print(files[i])
        #for out_file in files:
        #    os.remove(out_file)

        # 4. Upload the annotation and labels to Cytomine (annotations are extracted from the mask using
        # the AnnotationExporter module)
        upload_data(problem_cls,
                    nj,
                    in_images,
                    out_path,
                    **nj.flags,
                    projection=-1,
                    is_2d=is_2d,
                    monitor_params={
                        "start": 60,
                        "end": 90,
                        "period": 0.1,
                        "prefix":
                        "Extracting and uploading polygons from masks"
                    })

        # 5. Compute and upload the metrics
        nj.job.update(
            progress=80,
            statusComment='Computing and uploading metrics (if necessary)...')
        upload_metrics(problem_cls, nj, in_images, gt_path, out_path, tmp_path,
                       **nj.flags)
        nj.job.update(status=Job.TERMINATED,
                      progress=100,
                      statusComment='Finished.')
def main():
	with NeubiasJob.from_cli(sys.argv) as conn:
		problem_cls = get_discipline(conn, default=CLASS_LNDDET)
		conn.job.update(status=Job.RUNNING, progress=0, statusComment="Initialization of the prediction phase")
		in_images, gt_images, in_path, gt_path, out_path, tmp_path = prepare_data(problem_cls, conn, is_2d=True, **conn.flags)
		train_job = Job().fetch(conn.parameters.model_to_use)
		properties = PropertyCollection(train_job).fetch()
		str_terms = ""
		for prop in properties:
			if prop.fetch(key='id_terms') is not None:
				str_terms = prop.fetch(key='id_terms').value
		term_list = [int(x) for x in str_terms.split(' ')]
		attached_files = AttachedFileCollection(train_job).fetch()
		model_file = find_by_attribute(attached_files, "filename", "model_phase1.joblib")
		model_filepath = os.path.join(in_path, "model_phase1.joblib")
		model_file.download(model_filepath, override=True)
		clf = joblib.load(model_filepath)
		pr_ims = [int(p) for p in conn.parameters.cytomine_predict_images.split(',')]
		tifimg = readimage(in_path, pr_ims[0], image_type='tif')
		init_h = 100
		init_w = 100
		if len(tifimg.shape)==3:
			(init_h, init_w, init_d) = tifimg.shape
		else:
			(init_h, init_w) = tifimg.shape
		offset_file = find_by_attribute(attached_files, "filename", "offsets_phase1.joblib")
		offset_filepath = os.path.join(in_path, "offsets_phase1.joblib")
		offset_file.download(offset_filepath, override=True)
		feature_offsets_1 = joblib.load(offset_filepath)
		train_parameters = {}
		for hashmap in train_job.jobParameters:
			train_parameters[hashmap['name']] = hashmap['value']
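		# The training job's parameters are returned as strings; cast the ones used numerically below.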
		train_parameters['model_delta'] = float(train_parameters['model_delta'])
		train_parameters['model_sde'] = float(train_parameters['model_sde'])
		train_parameters['model_T'] = int(train_parameters['model_T'])
		for j in conn.monitor(pr_ims, start=10, end=33, period=0.05,prefix="Phase 1 for images..."):
			probability_map = probability_map_phase_1(in_path, j, clf, feature_offsets_1, float(train_parameters['model_delta']))
			filesave = os.path.join(out_path, 'pmap_%d.npy'%j)
			np.savez_compressed(filesave,probability_map)

		clf = None

		coords_file = find_by_attribute(attached_files, "filename", "coords.joblib")
		coords_filepath = os.path.join(in_path, "coords.joblib")
		coords_file.download(coords_filepath, override=True)
		(Xc, Yc) = joblib.load(coords_filepath)

		for j in conn.monitor(pr_ims, start=33, end=66, period=0.05,prefix="Phase 2 for images..."):
			filesave = os.path.join(out_path, 'pmap_%d.npy.npz' % j)
			probability_map = np.load(filesave)['arr_0']
			for id_term in term_list:
				reg_file = find_by_attribute(attached_files, "filename", "reg_%d_phase2.joblib"%id_term)
				reg_filepath = os.path.join(in_path, "reg_%d_phase2.joblib"%id_term)
				reg_file.download(reg_filepath, override=True)
				reg = joblib.load(reg_filepath)
				off_file = find_by_attribute(attached_files, "filename", 'offsets_%d_phase2.joblib' % id_term)
				off_filepath = os.path.join(in_path, 'offsets_%d_phase2.joblib' % id_term)
				off_file.download(off_filepath, override=True)
				feature_offsets_2 = joblib.load(off_filepath)
				probability_map_phase_2 = agregation_phase_2(
					in_path, j, id_term-1, probability_map, reg, train_parameters['model_delta'],
					feature_offsets_2, conn.parameters.model_filter_size, conn.parameters.model_beta,
					conn.parameters.model_n_iterations)
				filesave = os.path.join(out_path, 'pmap2_%d_%d.npy' % (j, id_term))
				np.savez_compressed(filesave, probability_map_phase_2)

		edge_file = find_by_attribute(attached_files, "filename", "model_edges.joblib")
		edge_filepath = os.path.join(in_path, "model_edges.joblib")
		edge_file.download(edge_filepath, override=True)
		edges = joblib.load(edge_filepath)
		for j in conn.monitor(pr_ims, start=66, end=90, period=0.05,prefix="Phase 3 for images..."):
			filesave = os.path.join(out_path, 'pmap2_%d_%d.npy.npz' % (j, term_list[0]))
			probability_map = np.load(filesave)['arr_0']
			(hpmap,wpmap) = probability_map.shape
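			# Stack the phase-2 probability maps of all landmarks into a (height x width x n_terms) volume.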
			probability_volume = np.zeros((hpmap,wpmap,len(term_list)))
			probability_volume[:,:,0] = probability_map
			for i in range(1,len(term_list)):
				filesave = os.path.join(out_path, 'pmap2_%d_%d.npy.npz' % (j, term_list[i]))
				probability_volume[:,:,i] = np.load(filesave)['arr_0']
			x_final, y_final = compute_final_solution_phase_3(
				Xc, Yc, probability_volume, conn.parameters.model_n_candidates,
				train_parameters['model_sde'], train_parameters['model_delta'],
				train_parameters['model_T'], edges)
			lbl_img = np.zeros((init_h, init_w), 'uint8')
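			# Write each predicted landmark as a single labelled pixel, clamping its coordinates to
			# the image bounds.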
			for i in range(x_final.size):
				x = min(init_w-1, max(0, int(x_final[i])))
				y = min(init_h-1, max(0, int(y_final[i])))
				lbl_img[y, x] = term_list[i]
			imwrite(path=os.path.join(out_path, '%d.tif' % j), image=lbl_img.astype(np.uint8), is_2d=True)
		upload_data(problem_cls, conn, in_images, out_path, **conn.flags, is_2d=True, monitor_params={"start": 90, "end": 95, "period": 0.1})
		conn.job.update(progress=90, statusComment="Computing and uploading metrics (if necessary)...")
		upload_metrics(problem_cls, conn, in_images, gt_path, out_path, tmp_path, **conn.flags)
		conn.job.update(status=Job.TERMINATED, progress=100, statusComment="Finished.")