def main(argv):
    """Entry point for the BIAFLOWS tree-tracing (TreTrc) benchmark job.

    Drives the standard BIAFLOWS pipeline: download inputs, run the image
    analysis workflow, upload results, then compute/upload metrics.

    :param argv: command-line arguments forwarded to BiaflowsJob.from_cli.
    """
    # 0. Initialize Cytomine client and job
    with BiaflowsJob.from_cli(argv) as nj:
        nj.job.update(status=Job.RUNNING, progress=0, statusComment="Initialisation...")
        problem_cls = CLASS_TRETRC
        # 3D problem: volumes are processed as stacks, not single slices.
        is_2d = False
        # 1. Create working directories on the machine
        # 2. Download the images
        # prepare_data handles both steps and returns the standard BIAFLOWS
        # directory layout (in/, ground_truth/, out/, tmp/).
        in_images, gt_images, in_path, gt_path, out_path, tmp_path = prepare_data(
            problem_cls, nj, is_2d=is_2d, **nj.flags)
        # 3. Call the image analysis workflow using the run script
        nj.job.update(progress=25, statusComment="Launching workflow...")
        # NOTE(review): no return-code handling — a workflow failure is not
        # reported to Cytomine (see the commented-out block below).
        workflow(in_images, out_path)
        #if return_code != 0:
        #    err_desc = "Failed to execute the Vaa3D (return code: {})".format(return_code)
        #    nj.job.update(progress=50, statusComment=err_desc)
        #    raise ValueError(err_desc)
        # Debug aid: list whatever the workflow produced.
        print('files in out_path ' + out_path + ': ')
        for file in glob.glob(out_path + '/*'):
            print(file)
        #files = (glob.glob(in_path+"/*.tif"))
        #print('Removing flipped images...')
        #for i in range(0,len(files)):
        #    files[i] = files[i].replace('/in/','/out/')
        #    print(files[i])
        #for out_file in files:
        #    os.remove(out_file)
        # 4. Upload the annotation and labels to Cytomine (annotations are extracted
        # from the mask using the AnnotationExporter module)
        upload_data(problem_cls, nj, in_images, out_path, **nj.flags, projection=-1,
                    is_2d=is_2d,
                    monitor_params={
                        "start": 60, "end": 90, "period": 0.1,
                        "prefix": "Extracting and uploading polygons from masks"
                    })
        # 5. Compute and upload the metrics
        nj.job.update(progress=80,
                      statusComment="Computing and uploading metrics (if necessary)...")
        upload_metrics(problem_cls, nj, in_images, gt_path, out_path, tmp_path,
                       **nj.flags)
        nj.job.update(status=Job.TERMINATED, progress=100, statusComment="Finished.")
def run(query, sid):
    """Execute *query* for session *sid*, time it, and cache the result.

    The workflow result dict is annotated with the session id ("sid") and the
    elapsed time in milliseconds ("ms"), stored back into the module-level
    ``sessions`` mapping, and the timing is logged.

    :param query: query payload forwarded to ``workflow``.
    :param sid: session identifier used as the ``sessions`` cache key.
    """
    # BUG FIX: the original used datetime.now().microsecond for both
    # endpoints; that field wraps to 0 every second, so deltas were wrong
    # (and could be negative) for any call crossing a second boundary or
    # lasting longer than one second. Measure the full interval instead.
    start = datetime.now()
    data = workflow(query, sid, sessions)
    elapsed = datetime.now() - start
    data["sid"] = sid
    delta = elapsed.total_seconds() * 1000  # milliseconds
    data["ms"] = delta
    sessions[sid] = data
    logging.info(f"Query {query} in {delta}ms")
import ROOT as r
from workflow import workflow

# Open the source Monte-Carlo tuple and grab its decay tree.
source_file = r.TFile("../tuples/bae-mc-11114003-2012-down.root")
source_tree = source_file.Get("bar-muon-tuple/DecayTree")

# Create the output file first so the fast clone is owned by it.
output_file = r.TFile("newtree.root","RECREATE")
cloned_tree = source_tree.CloneTree(-1,'fast')
# Disable every branch; the workflow modules enable what they need.
cloned_tree.SetBranchStatus('*',0)

# Run the rest-frame variable module over the clone.
pipeline = workflow()
pipeline.module_names.append('addRestFrameVars')
pipeline.run(cloned_tree)

# Persist the processed tree into the output file.
output_file.cd()
cloned_tree.Write()
output_file.Close()

# Keep the original public names bound so any downstream use still works.
f, t, fnew, tnew, myworkflow = source_file, source_tree, output_file, cloned_tree, pipeline
import ROOT as r
from workflow import workflow

# NOTE(review): fsig/tsig are opened but never used below — presumably kept
# for interactive comparison against the data tree; confirm before removing.
fsig = r.TFile("../bae-mc-12215002-2012-down.root")
tsig = fsig.Get("bar-muon-tuple/DecayTree")

# Open the data tuple and fast-clone its tree into a fresh output file.
f = r.TFile("../BuKMuMuX.root")
t = f.Get("Bplus_Tuple/DecayTree")
fnew = r.TFile("newtree.root","RECREATE")
tnew = t.CloneTree(-1,'fast')
# Disable every branch; the workflow modules enable what they need.
tnew.SetBranchStatus('*',0)

# Run the rest-frame variable module over the clone.
myworkflow = workflow()
myworkflow.module_names.append('addRestFrameVars')
myworkflow.run(tnew)

# BUG FIX: the processed tree was never persisted — "newtree.root" was
# opened RECREATE but left empty and unclosed. Mirror the sibling script,
# which ends with cd()/Write()/Close().
fnew.cd()
tnew.Write()
fnew.Close()
def main(argv):
    """Cytomine job entry point: download images, run the workflow, upload
    results, and (eventually) compute metrics.

    :param argv: command-line arguments forwarded to CytomineJob.from_cli.
    """
    # 0. Initialize Cytomine client and job
    with CytomineJob.from_cli(argv) as cj:
        cj.job.update(status=Job.RUNNING, progress=0, statusComment="Initialisation...")
        # 1. Create working directories on the machine:
        # - WORKING_PATH/in: input images
        # - WORKING_PATH/out: output images
        # - WORKING_PATH/ground_truth: ground truth images
        base_path = "{}".format(os.getenv("HOME"))
        # Ground-truth images are identified by this suffix in their name.
        gt_suffix = "_lbl"
        working_path = os.path.join(base_path, str(cj.job.id))
        in_path = os.path.join(working_path, "in")
        out_path = os.path.join(working_path, "out")
        gt_path = os.path.join(working_path, "ground_truth")
        if not os.path.exists(working_path):
            os.makedirs(working_path)
            os.makedirs(in_path)
            os.makedirs(out_path)
            os.makedirs(gt_path)
        # 2. Download the images (first input, then ground truth image)
        cj.job.update(
            progress=1,
            statusComment="Downloading images (to {})...".format(in_path))
        image_group = ImageGroupCollection().fetch_with_filter(
            "project", cj.parameters.cytomine_id_project)
        # Split the group into inputs vs ground truth by the name suffix.
        input_images = [i for i in image_group if gt_suffix not in i.name]
        gt_images = [i for i in image_group if gt_suffix in i.name]
        for input_image in input_images:
            input_image.download(os.path.join(in_path, "{id}.tif"))
        for gt_image in gt_images:
            # Match each ground-truth image to its input by stripping the
            # suffix, and store it under the *input* image's id.
            related_name = gt_image.name.replace(gt_suffix, '')
            related_image = [i for i in input_images if related_name == i.name]
            if len(related_image) == 1:
                gt_image.download(
                    os.path.join(gt_path, "{}.tif".format(related_image[0].id)))
        # 3. Call the image analysis workflow using the run script
        cj.job.update(progress=25, statusComment="Launching workflow...")
        #TODO: error handling
        workflow(in_path, out_path)
        # if return_code != 0:
        #     err_desc = "Failed to execute the ImageJ macro (return code: {})".format(return_code)
        #     cj.job.update(progress=50, statusComment=err_desc)
        #     raise ValueError(err_desc)
        # 4. Upload .swc and attach to corresponding image
        # ! not needed if we compute directly the metric
        for image in cj.monitor(
                input_images, start=60, end=80, period=0.1,
                prefix="Extracting and uploading polygons from masks"):
            afile = "{}.swc".format(image.id)
            path = os.path.join(out_path, afile)
            AttachedFile(image, filename=path).upload()
        # 4. Upload the annotation and labels to Cytomine (annotations are extracted
        # from the mask using the AnnotationExporter module)
        # for image in cj.monitor(input_images, start=60, end=80, period=0.1, prefix="Extracting and uploading polygons from masks"):
        #     file = "{}.tif".format(image.id)
        #     path = os.path.join(out_path, file)
        #     data = io.imread(path)
        #     # extract objects
        #     slices = mask_to_objects_2d(data)
        #     print("Found {} polygons in this image {}.".format(len(slices), image.id))
        #     # upload
        #     collection = AnnotationCollection()
        #     for obj_slice in slices:
        #         collection.append(Annotation(
        #             location=affine_transform(obj_slice.polygon, [1, 0, 0, -1, 0, image.height]).wkt,
        #             id_image=image.id, id_project=cj.parameters.cytomine_id_project, property=[
        #                 {"key": "index", "value": str(obj_slice.label)}
        #             ]
        #         ))
        #     collection.save()
        # 5. Compute the metrics
        cj.job.update(progress=80, statusComment="Computing metrics...")
        # TODO: compute metrics:
        # in /out: output files {id}.tiff
        # in /ground_truth: label files {id}.tiff
        cj.job.update(progress=99, statusComment="Cleaning...")
        # Remove downloaded inputs; note out/ and ground_truth/ are kept.
        for image in input_images:
            os.remove(os.path.join(in_path, "{}.tif".format(image.id)))
        cj.job.update(status=Job.TERMINATED, progress=100, statusComment="Finished.")
# type: ignore
# pylint: disable-all
# isort: skip
import sys

# BUG FIX: the path must be appended to sys.path *before* importing
# `workflow`; in the original the append came after the import, so it could
# never influence where `workflow` was resolved from.
sys.path.append('./tests/profiling')

from workflow import run_workflow as workflow

if __name__ == '__main__':
    workflow()