def keep_blobs_bigger_than(imp, min_size_pix=100):
    """Keep only blobs with area >= min_size_pix, painted into a new 8-bit mask.

    Parameters:
        imp -- binary ImagePlus to filter; it is closed as a side effect.
        min_size_pix -- minimum particle area (in pixels) for a blob to be kept.

    Returns a new, shown ImagePlus with value 255 where a retained blob was
    found and 0 elsewhere.
    """
    imp.killRoi()
    rt = ResultsTable()
    # avoid stacking the title prefix when imp was itself produced by this function
    if "Size_filtered_" in imp.getTitle():
        title_addition = ""
    else:
        title_addition = "Size_filtered_"
    out_imp = IJ.createImage("{}{}".format(title_addition, imp.getTitle()),
                             imp.getWidth(), imp.getHeight(), 1, 8)
    out_imp.show()
    # start from an all-black output mask
    IJ.run(out_imp, "Select All", "")
    IJ.run(out_imp, "Set...", "value=0 slice")
    mxsz = imp.width * imp.height
    roim = RoiManager()
    pa = ParticleAnalyzer(ParticleAnalyzer.ADD_TO_MANAGER,
                          ParticleAnalyzer.AREA | ParticleAnalyzer.SLICE,
                          rt, min_size_pix, mxsz)
    pa.setRoiManager(roim)
    roim.reset()
    rt.reset()
    pa.analyze(imp)
    rt_areas = rt.getColumn(rt.getColumnIndex("Area")).tolist()
    # print("Number of cells identified: {}".format(len(rt_areas)));
    # paint each retained blob (one ROI per particle) white in the output mask.
    # NOTE: the original computed rt_areas.index(max(rt_areas)) here, which was
    # unused and raised ValueError when no particle passed the size filter.
    for idx in range(len(rt_areas)):
        roim.select(out_imp, idx)
        IJ.run(out_imp, "Set...", "value=255 slice")
    roim.reset()
    roim.close()
    imp.changes = False
    imp.close()
    return out_imp
def keep_largest_blob(imp):
    """remove all blobs other than the largest by area"""
    results = ResultsTable()
    upper_area = imp.width * imp.height
    manager = RoiManager(False)
    analyzer = ParticleAnalyzer(ParticleAnalyzer.ADD_TO_MANAGER,
                                ParticleAnalyzer.AREA | ParticleAnalyzer.SLICE,
                                results, 0, upper_area)
    analyzer.setRoiManager(manager)
    # process every slice of the stack independently
    for slice_idx in range(1, imp.getImageStackSize() + 1):
        manager.reset()
        results.reset()
        imp.setPosition(slice_idx)
        analyzer.analyze(imp)
        areas = results.getColumn(results.getColumnIndex("Area")).tolist()
        largest = areas.index(max(areas))
        # blank every detected blob except the largest, iterating from the
        # highest ROI index down so earlier indices stay valid
        for blob_idx in reversed(range(len(areas))):
            if blob_idx == largest:
                continue
            manager.select(imp, blob_idx)
            IJ.run(imp, "Set...", "value=0 slice")
        imp.killRoi()
        manager.reset()
    manager.close()
def __localwand(self, x, y, ip, seuil, method, light):
    """Wand-select the particle at (x, y) and measure it.

    Parameters:
        x, y -- seed pixel for the wand selection.
        ip -- ImageProcessor to trace on; snapshotted so "background" mode
              can restore it after background subtraction.
        seuil -- tolerance factor (French: "threshold") applied to the peak
                 value to derive the wand tolerance.
        method -- "mean": tolerance relative to (peak - mean);
                  "background": subtract a rolling-ball background first;
                  anything else: plain tolerance = peak * seuil.
        light -- lightBackground flag passed to BackgroundSubtracter.

    Returns [peak, area, mean, intDen, feret] measured over the wand ROI.
    """
    self.__image.killRoi()
    ip.snapshot()
    if method == "mean" :
        peak=ip.getPixel(x,y)
        # tolerance relative to how far the seed pixel sits above the image mean
        tol = (peak - self.getMean())*seuil
        w = Wand(ip)
        w.autoOutline(x, y, tol, Wand.EIGHT_CONNECTED)
        #print "method=", method, tol, peak
    elif method == "background" :
        # rolling-ball radius derived from the minimum Feret diameter — TODO confirm intent
        radius = self.getMinF()/4
        bs = BackgroundSubtracter()
        # rollingBallBackground(ImageProcessor ip, double radius, boolean createBackground, boolean lightBackground, boolean useParaboloid, boolean doPresmooth, boolean correctCorners)
        bs.rollingBallBackground(ip, radius, False, light, False, True, False)
        peak=ip.getPixel(x,y)
        tol = peak*seuil
        w = Wand(ip)
        w.autoOutline(x, y, tol, Wand.EIGHT_CONNECTED)
        # restore the pre-subtraction pixels; only the outline needed the flattened image
        ip.reset()
        #print "method=", method, tol, radius, peak
    else :
        peak=ip.getPixel(x,y)
        tol = peak*seuil
        w = Wand(ip)
        w.autoOutline(x, y, tol, Wand.EIGHT_CONNECTED)
        #print "method=", method, tol
    # re-read the seed value (in "background" mode ip.reset() may have changed it)
    peak=ip.getPixel(x,y)
    temproi=PolygonRoi(w.xpoints, w.ypoints, w.npoints, PolygonRoi.POLYGON)
    self.__image.setRoi(temproi)
    #self.__image.show()
    #time.sleep(1)
    #peakip=self.__image.getProcessor()
    #stats=peakip.getStatistics()
    temprt = ResultsTable()
    analyser = Analyzer(self.__image, Analyzer.AREA+Analyzer.INTEGRATED_DENSITY+Analyzer.FERET, temprt)
    analyser.measure()
    #temprt.show("temprt")
    # parse the single measured row; column order: label, Area, Feret, ..., IntDen
    # (indices assume the measurement flags above — verify if flags change)
    rtValues=temprt.getRowAsString(0).split("\t")
    area=float(rtValues[1])
    intDen=float(rtValues[4])
    feret=float(rtValues[2])
    mean=intDen/area
    #time.sleep(2)
    temprt.reset()
    self.__image.killRoi()
    return [peak, area, mean, intDen, feret]
def AnalyzeParticle(IMP):
    """Run ImageJ particle analysis on IMP, save the ROIs and measurements
    to fixed paths on disk, then close the image and the ROI manager.

    IMP -- ImagePlus (expected binary/thresholded for ParticleAnalyzer).
    Side effects only; nothing is returned.
    """
    rm = RoiManager().getInstance2()
    rt = ResultsTable()
    # Eventually set measurements explicitly here to guarantee reproducibility:
    #IJ.run("Set Measurements...","area centroid fit redirect=None decimal=3")
    #https://imagej.nih.gov/ij/developer/api/constant-values.html#ij.plugin.filter.ParticleAnalyzer.SHOW_RESULTS
    # options=0 (no display options); 1043199 selects all measurement flags.
    # Size range 10000-300000 px, circularity 0.2-1.0.
    PA = ParticleAnalyzer(0 , 1043199 , rt, 10000, 300000, 0.2, 1.0)
    PA.setRoiManager(rm)
    PA.analyze(IMP)
    #IJ.run(IMP, "Analyze Particles...", "display clear include add")
    rm.runCommand("Save", "C:/Users/For Programming/Documents/Python Scripts/OutletHDD/aaa.zip")
    rt.saveAs("C:/Users/For Programming/Documents/Python Scripts/OutletHDD/aaa.csv")
    # Finally, close everything down.
    # The image must be closed before the ROI manager, otherwise this misbehaves.
    IMP.close()
    rm.close()
    rt.reset()
# Morphological dilate binner.setup('dilate', None) clusters = 0 initialCells = 0 # dilate by 'SAMPLEITER' for i in range(SAMPLEITER+1): p.analyze(binimp) cellcounts = rt.getCounter() if i == 0: initialCells = cellcounts #IJ.log("iter:" + str(i) + " -- cell counts: " + str(cellcounts)) if i == SAMPLEITER: clusters = cellcounts binner.run(binimp.getProcessor()) rt.reset() #binimp.show() #binorg.show() IJ.log("==== " + imp3.getTitle() + " =====") IJ.log("Number of Nucleus : " + str(initialCells)) IJ.log("Clusters at dilation " + str(SAMPLEITER) + ": " + str(clusters)) IJ.log("Clusters/Nucleus " + str(float(clusters)/float(initialCells)))
# (continuation: the two setValue calls below are the tail of a function whose
# def line is above this view — they record per-row AIS results in the summary)
    summary_rt.setValue('AIS start', row, ais_start)
    summary_rt.setValue('AIS length', row, ais_length)

# Main code
# Batch driver: validates the input directory, prepares the shared summary
# table, then processes each image/ROI file pair found in inputdir.
inputdir = str(inputdir)
outputdir = str(outputdir)
ais_method = ais_method.lower()
# user enters a percentage; convert to a 0-1 fraction
ais_threshold /=100
if not path.isdir(inputdir):
    print inputdir, 'does not exist or is not a directory.'
else:
    # reuse the existing summary table if present, optionally clearing it
    summary_rt = ResultsTable.getResultsTable(AIS_SUMMARY_TABLE)
    if summary_rt is None:
        summary_rt = ResultsTable()
    elif clear_summary:
        summary_rt.reset()
    if not path.isdir(outputdir):
        os.makedirs(outputdir)
    file_pairs = get_file_pairs(inputdir)
    for item in file_pairs:
        overlay = Overlay()
        composite,imps = open_image(item['img'])
        rois = load_rois(item['roi'])
        # skip images that lack the channels requested for AIS / nucleus
        if len(imps) < ais_chno or len(imps) < nucleus_chno:
            print 'Image %s has %d channels. Cannot process AIS segmentation for channel %d. Skipping.' % (item['img'], len(imps), ais_chno)
        else:
            if show_img:
                composite.show()
            #for i in imps:
            #    i.show()
            results, background = process_image(imps, rois, ais_chno, nucleus_chno, bg_roino=3, average=average, sample_width=ais_linewidth, method=ais_method, threshold=ais_threshold)
def saveresults(dir, name):
    """Save the active ImageJ Results table as <dir>/<name>.csv and clear it."""
    res = ResultsTable.getResultsTable()
    ResultsTable.save(res, os.path.join(dir, "{}.csv".format(name)))
    ResultsTable.reset(res)
def merge_incorrect_splits_and_get_centroids(imp, centroid_distance_limit=100, size_limit=100):
    """if particles are found with centroids closer than centroid_distance_limit and both have size<size_limit, get average centroid"""
    imp.killRoi()
    rt = ResultsTable()
    # output image: one small oval painted per final centroid, grey level = index+1
    out_imp = IJ.createImage("Nuclei centroids from {}".format(imp.getTitle()), imp.getWidth(), imp.getHeight(), 1, 8)
    out_imp.show()
    IJ.run(out_imp, "Select All", "")
    IJ.run(out_imp, "Set...", "value=0 slice")
    out_imp.show()
    cal = imp.getCalibration()
    # maximum particle size in calibrated units (analysis below runs in calibrated space)
    mxsz = imp.width * cal.pixelWidth * imp.height * cal.pixelHeight
    print("mxsz = {}".format(mxsz))
    roim = RoiManager()
    imp.show()
    # pass 1: only SMALL particles (area < size_limit) — candidates for merging
    pa = ParticleAnalyzer(
        ParticleAnalyzer.ADD_TO_MANAGER,
        ParticleAnalyzer.AREA | ParticleAnalyzer.SLICE | ParticleAnalyzer.CENTROID,
        rt, 0, size_limit)
    pa.setRoiManager(roim)
    roim.reset()
    rt.reset()
    pa.analyze(imp)
    MyWaitForUser("paise", "pause post-merge incorrect splits particel analysis")
    rt_xs = rt.getColumn(rt.getColumnIndex("X")).tolist()
    rt_ys = rt.getColumn(rt.getColumnIndex("Y")).tolist()
    centroids = [(x, y) for x, y in zip(rt_xs, rt_ys)]
    print("centroids = {}".format(centroids))
    centroids_set = set()
    for c in centroids:
        # distances from this centroid to every small-particle centroid (incl. itself)
        ds = [
            math.sqrt((c[0] - cx)**2 + (c[1] - cy)**2)
            for (cx, cy) in centroids
        ]
        close_mask = [d < centroid_distance_limit for d in ds]
        # if no other centroids are within centroid_distance_limit, add this centroid to the output set
        # otherwise, add the average position of this centroid and those within centroid_distance_limit to the output set
        # (booleans act as 0/1 weights; the set dedupes identical merged positions)
        centroids_set.add(
            (sum([msk * b[0] for msk, b in zip(close_mask, centroids)]) /
             sum(close_mask),
             sum([msk * b[1] for msk, b in zip(close_mask, centroids)]) /
             sum(close_mask)))
    roim.reset()
    rt.reset()
    # pass 2: LARGE particles (area >= size_limit) — kept as-is, no merging
    pa = ParticleAnalyzer(
        ParticleAnalyzer.ADD_TO_MANAGER,
        ParticleAnalyzer.AREA | ParticleAnalyzer.SLICE | ParticleAnalyzer.CENTROID,
        rt, size_limit, mxsz)
    pa.setRoiManager(roim)
    pa.analyze(imp)
    MyWaitForUser("paise", "pause post-merge incorrect splits particel analysis 2")
    # guard: pass 2 may legitimately find nothing
    if rt.columnExists("X"):
        rt_xs = rt.getColumn(rt.getColumnIndex("X")).tolist()
        rt_ys = rt.getColumn(rt.getColumnIndex("Y")).tolist()
        centroids = [(x, y) for x, y in zip(rt_xs, rt_ys)]
        for c in centroids:
            centroids_set.add(c)
    centroids = list(centroids_set)
    cal = imp.getCalibration()
    # convert calibrated centroid coordinates back to pixel coordinates
    centroids = [(c[0] / cal.pixelWidth, c[1] / cal.pixelHeight)
                 for c in centroids]
    print("new number of nuclei identified = {}".format(len(centroids)))
    roim.reset()
    roim.close()
    # paint each final centroid as a 10x10 oval with grey value idx+1
    for idx, c in enumerate(centroids):
        roi = OvalRoi(c[0], c[1], 10, 10)
        out_imp.setRoi(roi)
        IJ.run(out_imp, "Set...", "value={} slice".format(idx + 1))
    imp.changes = False
    #imp.close();
    return out_imp
def generate_background_rois(input_mask_imp, params, membrane_edges, dilations=5, threshold_method=None, membrane_imp=None):
    """automatically identify background region based on auto-thresholded image, existing membrane edges and position of midpoint anchor

    Parameters:
        input_mask_imp -- pre-made binary mask, or None to threshold membrane_imp instead.
        params -- parameter object providing threshold_method, listThresholdMethods()
                  and manual_anchor_midpoint.
        membrane_edges -- per-frame PolygonRois describing the membrane edge.
        dilations -- number of binary dilations applied before picking the
                     background region (was previously ignored: a hard-coded
                     `dilations = 5` inside the loop overrode it — now fixed).
        threshold_method -- optional override of params.threshold_method.
        membrane_imp -- raw image to threshold when input_mask_imp is None.

    Returns one ROI per frame (or None for frames where no region was found).
    """
    if input_mask_imp is None and membrane_imp is not None:
        segmentation_imp = Duplicator().run(membrane_imp)
        # threshold using the previous method unless a valid override was supplied
        if (threshold_method is None
                or not (threshold_method in params.listThresholdMethods())):
            mask_imp = make_and_clean_binary(segmentation_imp, params.threshold_method)
        else:
            mask_imp = make_and_clean_binary(segmentation_imp, threshold_method)
        segmentation_imp.close()
    else:
        input_mask_imp.killRoi()
        mask_imp = Duplicator().run(input_mask_imp)
    rois = []
    IJ.setForegroundColor(0, 0, 0)
    roim = RoiManager(True)
    rt = ResultsTable()
    for fridx in range(mask_imp.getNFrames()):
        mask_imp.setT(fridx + 1)
        # add extra bit to binary mask from loaded membrane in case user refined edges...
        # flip midpoint anchor across the line joining the two extremes of the membrane,
        # and fill in the triangle made by this new point and those extremes
        poly = membrane_edges[fridx].getPolygon()
        l1 = (poly.xpoints[0], poly.ypoints[0])
        l2 = (poly.xpoints[-1], poly.ypoints[-1])
        M = (0.5 * (l1[0] + l2[0]), 0.5 * (l1[1] + l2[1]))
        Mp1 = (params.manual_anchor_midpoint[0][0] - M[0],
               params.manual_anchor_midpoint[0][1] - M[1])
        p2 = (M[0] - Mp1[0], M[1] - Mp1[1])
        new_poly_x = list(poly.xpoints)
        new_poly_x.append(p2[0])
        new_poly_y = list(poly.ypoints)
        new_poly_y.append(p2[1])
        mask_imp.setRoi(PolygonRoi(new_poly_x, new_poly_y, PolygonRoi.POLYGON))
        IJ.run(mask_imp, "Fill", "slice")
        mask_imp.killRoi()
        # now dilate the masked image and identify the unmasked region closest to the midpoint anchor
        ip = mask_imp.getProcessor()
        # BUG FIX: dilations was previously reassigned to 5 here, silently
        # ignoring the function parameter; the parameter is now honoured.
        for d in range(dilations):
            ip.dilate()
        ip.invert()
        mask_imp.setProcessor(ip)
        mxsz = mask_imp.getWidth() * mask_imp.getHeight()
        pa = ParticleAnalyzer(
            ParticleAnalyzer.ADD_TO_MANAGER | ParticleAnalyzer.SHOW_PROGRESS,
            ParticleAnalyzer.CENTROID, rt, 0, mxsz)
        pa.setRoiManager(roim)
        pa.analyze(mask_imp)
        # distance from each candidate region's centroid to the anchor midpoint
        ds_to_anchor = [
            math.sqrt((x - params.manual_anchor_midpoint[0][0])**2 +
                      (y - params.manual_anchor_midpoint[0][1])**2)
            for x, y in zip(
                rt.getColumn(rt.getColumnIndex("X")).tolist(),
                rt.getColumn(rt.getColumnIndex("Y")).tolist())
        ]
        if len(ds_to_anchor) > 0:
            roi = roim.getRoi(ds_to_anchor.index(min(ds_to_anchor)))
            rois.append(roi)
        else:
            rois.append(None)
        roim.reset()
        rt.reset()
    roim.close()
    mask_imp.close()
    return rois
rt = ResultsTable(); #rm = RoiManager.getInstance(); #print(rm) rm = RoiManager(False); pa = ParticleAnalyzer((ParticleAnalyzer.ADD_TO_MANAGER | ParticleAnalyzer.SHOW_MASKS), (Measurements.CENTROID | Measurements.STACK_POSITION), rt, 500, 30000, 0.0, 1.0) pa.setHideOutputImage(False) keep_rois = []; pa.analyze(imp); IJ.run("Set Measurements...", "centroid redirect=None decimal=3"); frames = imp.getNFrames(); for fridx in range(0, frames): rt.reset(); imp.setSliceWithoutUpdate(fridx + 1); ip = imp.getProcessor(); if not pa.analyze(imp, ip): raise Exception("something went wrong analysing particles!") rt.show("centroids"); rm = RoiManager.getInstance(); if rm.getCount() > 0: rois = rm.getRoisAsArray(); centroidsx = rt.getColumn(rt.getColumnIndex('X')); centroidsy = rt.getColumn(rt.getColumnIndex('Y')); print(centroidsx); print(centroidsy); gd = GenericDialog("Continue?"); gd.showDialog(); if gd.wasCanceled():
# (tail of gen_random_lognormal_particle — its def line is above this view;
# draws one sample from a log-normal distribution parameterised by the
# geometric mean diameter gmd_nm and geometric standard deviation gsd)
    my_gmd = Log().value(gmd_nm)
    my_gsd = Log().value(gsd)
    distn = LogNormalDistribution(my_gmd, my_gsd)
    the_sample = distn.sample()
    return(the_sample)

# Main script: generate n_samples simulated particle diameters, tabulate
# them in an ImageJ ResultsTable, save to csv_out and report timing.
tic = time.time()
n_samples = 2500
rt = ResultsTable(n_samples)
rt.setHeading(0, "num")
rt.setHeading(1, "ECD nm")
for x in range(0, n_samples):
    # GMD 50 nm, GSD 1.2 — simulated equivalent circular diameters
    my_sample = gen_random_lognormal_particle(50.0, 1.2)
    rt.setValue(0, x, x+1)
    rt.setValue(1, x, my_sample)
rt.show("Results")
IJ.saveAs("Results", csv_out)
toc = time.time()
elapsed = toc - tic
print("generated %g particles" % n_samples)
print("completed in %g sec" % elapsed )
print("saved here:")
print("%s" % csv_out )
print("done...")
rt.reset()