def fit_gauss(lroi, imp, p, peak_id, id_, type_, rm):
    lroi.setName("{}_{}_{}".format(str(id_), peak_id, type_))
    imp.setRoi(lroi)
    rm.addRoi(lroi)
    prof = ProfilePlot(imp)
    y = prof.getProfile()
    x = xrange(len(y))
    fitter = CurveFitter(x, y)
    # ImageJ GAUSSIAN model: y = a + (b-a)*exp(-(x-c)^2/(2*d^2)); d is the standard deviation.
    fitter.doFit(CurveFitter.GAUSSIAN)
    param_values = fitter.getParams()
    std = param_values[3]
    fwhm = 2.3548 * std  # FWHM = 2*sqrt(2*ln 2) * sigma
    r2 = fitter.getFitGoodness()
    y_ = [fitter.f(x_) for x_ in x]
    area_profile = sum(y) - len(y) * min(y)
    area_gauss = sum(y_) - len(y_) * min(y_)
    output = {}
    output["x_pos"] = p.x
    output["y_pos"] = p.y
    output["fwhm"] = fwhm
    output["fwhm_nm"] = pixel_size_nm * fwhm
    output["r2_GoF"] = r2
    output["id"] = id_
    output["peak_id"] = peak_id
    output["type"] = type_
    # yay, Excel magic :-)
    output["avg_fwhm"] = '=AVERAGEIFS(F:F,B:B,B{},F:F,"<>"&"")'.format(id_ + 2)
    output["area_profile"] = area_profile
    output["area_gauss"] = area_gauss
    if peak_id == DEBUG:
        plot = Plot("ROI peak {} type {}".format(peak_id, type_), "X (gray)", "Y (fit window)")
        plot.setLineWidth(2)
        plot.setColor(Color.RED)
        plot.addPoints(x, y, Plot.LINE)
        plot.setColor(Color.BLUE)
        plot.addPoints(x, y_, Plot.LINE)
        plot.show()
    return output
def create_plot(imp, method, average, threshold=0.1):
    intensity = cross_section_intensity(imp, method)
    cal = imp.getCalibration()
    x_inc = cal.pixelWidth
    units = cal.getUnits()
    x_label = "Distance (%s)" % units
    y_label = 'Intensity'  # cal.getValueUnit()
    x_values = [i * x_inc for i in range(len(intensity))]

    # Trim trailing zero intensities.
    lastindex = len(x_values) - 1
    for i in range(1, len(x_values) + 1):
        index = len(x_values) - i
        if intensity[index] == 0:
            lastindex = index - 1
        else:
            break
    ax = [x_values[i] for i in range(lastindex)]
    ay = [intensity[i] for i in range(lastindex)]

    average_x, average_y = rolling_average(ax, ay, average)
    firstidx, lastidx, threshold_intensity = get_thresholded_idx(average_y, threshold=threshold)
    perform_trim = firstidx != -1 and lastidx != -1
    if perform_trim:
        trim_x = [average_x[i] for i in range(firstidx, lastidx + 1)]
        trim_y = [average_y[i] for i in range(firstidx, lastidx + 1)]

    # raw data
    flags = Plot.getDefaultFlags()
    flags = flags - Plot.Y_GRID - Plot.X_GRID
    plot = Plot("%s-Plot" % imp.getTitle(), x_label, y_label, flags)
    plot.setLineWidth(1)
    plot.setColor(Color.BLACK)
    plot.addPoints(x_values, intensity, Plot.LINE)

    # threshold line
    plot.setLineWidth(2)
    plot.setColor(Color.BLACK)
    plot.addPoints([0, x_inc * imp.getWidth()],
                   [threshold_intensity, threshold_intensity], Plot.LINE)

    # rolling average
    plot.setLineWidth(2)
    plot.setColor(Color.MAGENTA)
    plot.addPoints(average_x, average_y, Plot.LINE)

    # standard legend labels
    labels = "\t".join(['Raw Data (%s)' % method,
                        'Intensity threshold (%d%s)' % (100 * threshold, '%'),
                        'Rolling Average (n=%d)' % average])

    # trimmed rolling average
    if perform_trim:
        plot.setLineWidth(2)
        plot.setColor(Color.GREEN)
        plot.addPoints(trim_x, trim_y, Plot.LINE)
        labels += '\tTrimmed Rolling Average (n=%d)' % average

    plot.setColor(Color.BLACK)
    plot.setLimitsToFit(False)
    plot.addLegend(labels)

    rt = ResultsTable()
    for row, x in enumerate(x_values):
        rt.setValue(DIST_RAW_COL, row, x)
        rt.setValue(INT_RAW_COL, row, intensity[row])
    for row, x in enumerate(average_x):
        rt.setValue(DIST_AVG_COL, row, x)
        rt.setValue(INT_AVG_COL, row, average_y[row])
    if perform_trim:
        for row, x in enumerate(trim_x):
            rt.setValue(DIST_TRIM_COL, row, x)
            rt.setValue(INT_TRIM_COL, row, trim_y[row])
    return plot, rt
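# Note: cross_section_intensity(), rolling_average() and get_thresholded_idx() are
# helpers defined elsewhere in the source script. For illustration only, a minimal
# sketch of a centred, windowed rolling average (an assumption, not the author's
# original implementation) could look like this:
def rolling_average_sketch(xs, ys, n):
    """Return (x, y) lists where each y is the mean over a window of n samples."""
    avg_x, avg_y = [], []
    for i in range(len(ys) - n + 1):
        window = ys[i:i + n]
        avg_x.append(xs[i + n // 2])           # x position at the window centre
        avg_y.append(sum(window) / float(n))   # windowed mean intensity
    return avg_x, avg_y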
        # Means per frame, and then the mean of means.
        listmeans = ImagesMean(dataset, z)
        stackmeans = computeMean(listmeans)
        filemeans.append(stackmeans)
        # Stds per frame.
        liststds = ImagesStd(dataset, listmeans, z)
        grouped = group_stds(liststds)  # std of the stds
        filestds.append(grouped)
    return filemeans, filestds


# MAIN CODE
srcDir = DirectoryChooser("Choose").getDirectory()
filelist = get_file_list(srcDir, '.tif')
means, stds = main(filelist)

# PLOTTING
plot = Plot("PTC", "Mean", "Std")
plot.setLimits(0.00, 200.0, 0.00, 100.0)
plot.setColor(Color.BLUE)
plot.addPoints(means, stds, Plot.CROSS)
plot.show()
print means, stds
    mean = getMean(ip, imp)
    means.append(mean)
IJ.showProgress(1)
IJ.resetMinAndMax()

# Set up the variables for plotting, and then plot!
x = xrange(1, size + 1)
y = means
plot = Plot("Illumination intensity stability (" + path.basename(stackpath) + ")",
            "Frame", "Mean frame intensity", [], [])
plot.setLineWidth(1)
# plot.setColor(Color.BLACK)
plot.addPoints(x, y, Plot.LINE)
plot_window = plot.show()


def stdev(s):
    avg = sum(s) * 1.0 / len(s)
    variance = map(lambda x: (x - avg) ** 2, s)
    return math.sqrt(average(variance))


def average(x):
    return sum(x) * 1.0 / len(x)


IJ.log("Results for " + path.basename(stackpath) + ":")
IJ.log("Average intensity: " + str(average(means)))
IJ.log("Standard deviation: " + str(stdev(means)))
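# getMean() is defined earlier in the source script. For illustration only, a minimal
# sketch of the assumed behaviour (mean pixel value of the current slice's processor)
# using the standard ImageJ statistics API:
from ij.process import ImageStatistics
from ij.measure import Measurements

def get_mean_sketch(ip, imp):
    stats = ImageStatistics.getStatistics(ip, Measurements.MEAN, imp.getCalibration())
    return stats.mean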
mx1 = []
mx2 = []
my1 = []
my2 = []
for i in range(0, len(x1)):
    mx1.append(x1[i] - dx[i] / 2)
    my1.append(y1[i] - dy[i] / 2)
    mx2.append(x2[i] - dx[i] / 2)
    my2.append(y2[i] - dy[i] / 2)

#plt = Plot(fName, "degrees", "degrees")
#plt.setLimits(-10, 10, -10, 10)
##plt.setAxes(False, False, True, True, False, False, 1, 10)
#plt.setFrameSize(500, 500)
#plt.draw()
#plt.addPoints(cx, cy, Plot.CIRCLE)
#plt.drawVectors(x1, y1, x2, y2)
#plt.show()

plt2 = Plot(fName, "degrees", "degrees")
plt2.setLimits(-10, 10, -10, 10)
plt2.setAxes(False, False, True, True, False, False, 1, 10)
plt2.setFrameSize(500, 500)
plt2.draw()
plt2.addPoints(cx, cy, Plot.CIRCLE)
plt2.drawVectors(mx1, my1, mx2, my2)
plt2.setColor(java.awt.Color.RED)
plt2.setLineWidth(2)
plt2.addPoints(x1, y1, Plot.CIRCLE)
plt2.show()
    for filename, row, row_value in all_ydata:
        table.set(filename, row, row_value)
    uiservice.show("MergedFiles_%s" % data_identifier, table)

    log("Retrieving statistics for merged Y-data...")
    list_of_rows = defaultdict(list)
    for data in all_ydata:
        list_of_rows[data[1]].append(data[2])

    row_stats = {}
    for row_key, row_values in list_of_rows.iteritems():
        row_stats[row_key] = (mean(row_values), stdev(row_values), len(row_values))

    table = newtable(xcolumn_header, xvalues)
    for key, value in row_stats.iteritems():
        table.set("Mean", int(key), value[0])
        table.set("StdDev", int(key), value[1])
        table.set("N", int(key), value[2])
    uiservice.show("Stats_%s" % data_identifier, table)

    plot = Plot("Mean Sholl Plot [%s]" % ycolumn_header,
                xcolumn_header, "N. of intersections")
    plot.setLegend("Mean" + u'\u00B1' + "SD", Plot.LEGEND_TRANSPARENT + Plot.AUTO_POSITION)
    plot.setColor("cyan", "blue")
    plot.addPoints(table.get(0), table.get(1), table.get(2),
                   Plot.CONNECTED_CIRCLES, data_identifier)
    plot.show()
    log("Parsing concluded.")


main()
    row = [str(p[i]) for p in profiles]
    row = ",".join(row)
    f.write(row + "\n")

# Generate a plot
if doPlot:
    from ij.gui import Plot
    from java.awt import Color
    p = Plot('Profiles', 'Channel #', 'Intensity')
    p.setSize(640, 480)
    maxP = len(profiles)
    maxV = 0
    for iprofile, profile in enumerate(profiles):
        h = 0.66 - (float(iprofile) / maxP)
        if h < 0:
            h = h + 1
        p.setColor(Color.getHSBColor(h, .8, 1))
        p.addPoints(range(len(profile)), profile, p.LINE)
        maxV_ = max(profile)
        if maxV < maxV_:
            maxV = maxV_
    p.setLimits(0, len(profile) - 1, 0, maxV * 1.2)
    p.setLegend("\n".join(names), p.TOP_LEFT | p.LEGEND_TRANSPARENT)
    p.show()

    # Save the plot as PNG
    if doSavePlot:
        imp = p.getImagePlus()
        IJ.saveAs(imp, 'PNG', file.absolutePath + "_compensationPlot.png")
    else:
        sliceAvgInt[currentSlice - 1] = 0
        sliceAboveZeroNorm[currentSlice - 1] = 0
        sliceExpectedRadNorm[currentSlice - 1] = 0

print("writing to file...")
# Find the output path relative to the current image's directory.
thisStr = IJ.getDirectory("image")
upStr = thisStr[:thisStr.find("merged_videos/")]
rezPath = upStr + "blink_files/result_new.txt"
myfile = open(rezPath, 'w')
for i in range(len(slicesIdx)):
    myfile.write(str(slicesIdx[i]) + " " + str(sliceAvgInt[i]) + " " +
                 str(sliceAboveZeroNorm[i]) + " " + str(sliceExpectedRadNorm[i]) + "\n")
myfile.close()

print("plotting...")
plot = Plot("Title", "X", "Y")
plot.setLimits(1.0, img2.getNSlices(), 0.0, 1.0)
plot.setColor(Color.RED)
plot.addPoints(slicesIdx, sliceAvgInt, Plot.CROSS)
plot.setColor(Color.BLUE)
plot.addPoints(slicesIdx, sliceAboveZeroNorm, Plot.CROSS)
plot.setColor(Color.GREEN)
plot.addPoints(slicesIdx, sliceExpectedRadNorm, Plot.CROSS)
plot.show()
def plots(values, timelist, Cell_number, value_type, Stim_List, dirs, parameters):
    """ Plots all calculated values, saves plots to the generated directory,
        returns plot scale. """

    Mean_plot = 0
    # Flatten nested lists (normalized lists are not nested).
    if value_type == "Normalized aFRET mean":
        values_concat = [values[i:i + Cell_number]
                         for i in range(0, (len(values)), Cell_number)]
        Mean_sd = [standard_deviation(values_concat[i])
                   for i in range(len(values_concat))]
        Mean_sd = [item for sublist in Mean_sd for item in sublist]
        Mean_plot = 1
    elif value_type == "Normalized dFRET mean":
        values_concat = [values[i:i + Cell_number]
                         for i in range(0, (len(values)), Cell_number)]
        Mean_sd = [standard_deviation(values_concat[i])
                   for i in range(len(values_concat))]
        Mean_sd = [item for sublist in Mean_sd for item in sublist]
        Mean_plot = 1
    else:
        if "Normalized" not in value_type:
            values = [item for sublist in values for item in sublist]

    # Repeat list items x Cell_number (match timepoints with # of cells).
    timelist = [x for item in timelist for x in repeat(item, Cell_number)]

    # Scaling of plots.
    max_Y = 1
    if max(values) > 3:
        if not isinstance(values[0], list):
            max_Y = max(values) * 1.3
    elif max(values) > 2.5:
        max_Y = 3.3
    elif max(values) > 2:
        max_Y = 2.7
    elif max(values) > 1.5:
        max_Y = 2.2
    elif max(values) > 1.3:
        max_Y = 1.7
    elif max(values) > 1:
        max_Y = 1.4

    min_Y = 0
    if min(values) > 2:
        min_Y = min(values) * 0.8
    elif min(values) > 1.5:
        min_Y = 1.5
    elif min(values) > 1:
        min_Y = 1
    elif min(values) > 0.5:
        min_Y = 0.2
    elif min(values) < -0.5:
        min_Y = min(values) * 1.3
    elif min(values) < -0.2:
        min_Y = -0.3
    elif min(values) < -0.1:
        min_Y = -0.15
    elif min(values) < -0.08:
        min_Y = -0.1
    elif min(values) < -0.05:
        min_Y = -0.08
    elif min(values) < -0.01:
        min_Y = -0.06

    # Scaling of normalized plots.
    if "Normalized" in value_type:
        min_Y, max_Y = float(parameters["p_min_n"]), float(parameters["p_max_n"])

    if value_type == "dFRET":
        max_Y = float(parameters["p_max"])
        min_Y = float(parameters["p_min"])
    elif value_type == "aFRET":
        max_Y = float(parameters["p_max"])
        min_Y = float(parameters["p_min"])

    # Call plot, set scale.
    plot = Plot(Title, "Time (minutes)", value_type)
    if len(timelist) > 1:
        plot.setLimits(min(timelist), max(timelist), min_Y, max_Y)
    else:
        plot.setLimits(-1, 1, min_Y, max_Y)

    # Retrieve colors.
    Colors, Colors_old = colorlist()

    # Set colors, plot points.
    if Mean_plot == 0:
        for i in range(Cell_number):
            if i < 19:
                plot.setColor(Color(*Colors[i][0:3]))
            elif i > 28:
                print "29 color limit exceeded"
                return
            else:
                plot.setColor(eval(Colors_old[i]))
                print "Out of fancy colors, using java.awt.Color defaults"

            plot.setLineWidth(1.5)
            plot.addPoints(timelist[i::Cell_number], values[i::Cell_number], Plot.LINE)
            plot.setLineWidth(1)
            # Comment in to define color + fill color for circles.
            plot.setColor(Color(*Colors[i][0:3]), Color(*Colors[i][0:3]))
            # plot.addPoints(timelist[i::Cell_number], values[i::Cell_number], Plot.CIRCLE)
    else:
        min_Y, max_Y = 0.6, 1.6
        if len(timelist) > 1:
            plot.setLimits(min(timelist), max(timelist), min_Y, max_Y)
        else:
            plot.setLimits(-1, 1, min_Y, max_Y)

        plot.setColor("Color.BLACK")
        plot.setLineWidth(1.5)
        plot.addPoints(timelist[0::Cell_number], Mean_sd[0::2], Plot.LINE)
        plot.setLineWidth(1)
        plot.setColor("Color.BLACK", "Color.BLACK")
        plot.addPoints(timelist[0::Cell_number], Mean_sd[0::2], Plot.CIRCLE)
        plot.setColor(Color(*Colors[6][0:3]))
        plot.addErrorBars(Mean_sd[1::2])

    # Gets stim name from input.
    if not Stim_List == False:
        text = [sublist[i] for sublist in Stim_List for i in range(len(Stim_List))]
        Stim_List = [sublist[1:] for sublist in Stim_List]

        # Plot stimulation markers.
        plot.setLineWidth(2)
        for sublist in Stim_List:
            plot.setColor("Color.GRAY")
            plot.drawLine(sublist[0], min_Y + ((max_Y - min_Y) * 0.82),
                          sublist[1], min_Y + ((max_Y - min_Y) * 0.82))
            plot.drawDottedLine(sublist[0], min_Y + ((max_Y - min_Y) * 0.82), sublist[0], -1, 4)
            plot.drawDottedLine(sublist[1], min_Y + ((max_Y - min_Y) * 0.82), sublist[1], -1, 4)
            plot.setFont(Font.BOLD, 16)
            plot.addText(text[0], sublist[0], min_Y + ((max_Y - min_Y) * 0.82))

    cell_num = 0
    if "concentration" not in value_type:
        testfile = open(os.path.join(dirs["Tables"], value_type + ".txt"), "w")
        data = plot.getResultsTable()
        headings = data.getHeadings()
        datadict = {}
        for heading in headings:
            index = data.getColumnIndex(heading)
            if "Y" in heading:
                column = {"Cell " + str(cell_num).zfill(2):
                          [round(float(i), 4) for i in data.getColumn(index)]}
            elif "X" in heading:
                column = {"X": [round(float(i), 4) for i in data.getColumn(index)]}
            cell_num += 1
            datadict.update(column)

        sorted_data = []
        for row in zip(*([key] + value for key, value in sorted(datadict.items()))):
            sorted_data.append(row)

        testfile.write("\t\t".join(sorted_data[0]))

        # Prints output in columns; copy-paste directly to SigmaPlot/Prism/Excel etc.
        for cell in range(1, len(sorted_data), 1):
            testfile.write("\n")
            for times in range(len(sorted_data[cell])):
                testfile.write(str(sorted_data[cell][times]) + "\t\t")

        # Dumps sorted data to JSON format, for use in e.g. matplotlib.
        with open(os.path.join(dirs["Tables"], value_type + ".json"), "w") as outfile:
            datadict["Stim"] = Stim_List
            json.dump(datadict, outfile, sort_keys=True)

        testfile.close()

    # Generate high-res plot with anti-aliasing (scale x 1).
    plot = plot.makeHighResolution(Title, 1, True, True)
    # PlotWindow.noGridLines = True

    # Save plot with appropriate title.
    IJ.saveAs(plot, "PNG", os.path.join(dirs["Plots"], str(Title) + str(value_type)))

    # (For ratiometric image-generator)
    return max_Y, min_Y
        if amplificacion > MaxAmplificacion:
            MaxAmplificacion = amplificacion
            ROptima = LogFilterSigma
        if amplificacion <= ValuePrev:
            RepCounter += 1
        else:
            RepCounter = 0
        ValuePrev = amplificacion
        # Break if the value decreases for 3 consecutive values.
        if RepCounter == 3:
            break
    return MaxAmplificacion, ROptima


# Main method
image = IJ.getImage()
Moment3 = CheckMoment(image)
Mom3Norm = [i / Moment3[0] - 1.0 for i in Moment3]
NFrames = image.getNFrames()
xArr = array(range(1, NFrames + 1), 'd')
plot = Plot("Title", "Time", "Delta m", xArr, Mom3Norm)
plot.setLimits(1, NFrames, min(Mom3Norm), max(Mom3Norm))
plot.setColor(Color.BLUE)
plot.addPoints(xArr, Mom3Norm, Plot.CROSS)
plot.show()
def process(dirIn, es, ee):
    roi_w = 30
    roi_h = 30
    iStart = 6 - 1

    # srcDir = DirectoryChooser("Choose directory").getDirectory()
    srcDir = dirIn
    if not srcDir:
        return
    destDir = srcDir + "--analysis"
    print "creating: " + destDir
    # destDir = srcDir
    if os.path.exists(destDir):
        shutil.rmtree(destDir)
        time.sleep(1)
    os.mkdir(destDir)

    # fileId = IJ.getString("filenames MUST contain:", "FRAP.lsm")
    fileId = "FRAP.lsm"
    iFile = 0
    sOut = []
    print "starting analysis in folder: " + srcDir

    for root, directories, filenames in os.walk(srcDir):
        print directories
        for filename in filenames:
            print filename
            if not (fileId in filename):
                continue
            iFile = iFile + 1
            if iFile < es:
                continue
            if iFile > ee:
                f.close()
                return

            # Extract information from file and folder names.
            print "root = %s" % root
            (tmp, folder2) = splitLastFolder(root)
            tmp = folder2.split("--")[4]
            replicate = folder2
            print replicate
            wells = tmp.split("-")
            print wells
            tmp = filename.split("--")[1]
            tmp = tmp.split("W")[1]
            wellNum = int(tmp)
            print wellNum
            well = wells[wellNum - 1]
            print well
            tmp = filename.split("--")[0:4]
            tmp = '--'.join(tmp)
            filenameDocu = tmp + ".lsm--docu.png"
            filenameLSM = tmp + ".lsm"
            pathFRAP = os.path.join(root, filename)
            pathDocu = os.path.join(root, filenameDocu)
            pathLSM = os.path.join(root, filenameLSM)
            pathFRAPandSEG = os.path.join(destDir, filename + "--raw+seg.tif")
            print pathFRAP
            print pathFRAPandSEG
            print pathDocu
            print pathLSM

            f = open(os.path.join(destDir, filename + "--metadata.csv"), 'w')
            f.write("well,replicate,pathFRAP,pathFRAPandSEG,pathDocu,pathLSM\n")
            f.write(well + "," + replicate + "," + pathFRAP + "," + pathFRAPandSEG + ","
                    + pathDocu + "," + pathLSM + "\n")
            f.close()

            # IJ.run("Close all forced", "")

            # Load data (possibly headless).
            path = os.path.join(root, filename)
            # imp = IJ.openImage(path)
            impA = getImps(path)
            imp = impA[0]
            imp.show()
            # IJ.run("Bio-Formats Importer", "open=" + path + " color_mode=Default split_channels view=Hyperstack stack_order=XYCZT")

            # Remove transmission channel.
            IJ.run("Slice Remover", "first=2 last=100000 increment=2")
            imp = IJ.getImage()
            # imp.show()
            imp.setTitle("raw")
            IJ.run("Properties...", "unit=pixels pixel_width=1 pixel_height=1")
            dt = imp.getFileInfo().frameInterval
            print "dt = %f" % dt
            if dt == 0:
                dt = 0.28
            print "dt = %f" % dt
            # todo: can i get the real time-stamps?
            im_w = imp.getWidth()
            im_h = imp.getHeight()
            print "im_w = " + str(im_w)
            print "im_h = " + str(im_h)
            nt = max(imp.getNSlices(), imp.getNFrames())
            print "nt = %f" % nt
            roi_x = im_w / 2 - roi_w / 2
            roi_y = im_h / 2 - roi_h / 2
            roi_x2 = roi_x + roi_w
            roi_y2 = roi_y + roi_h

            # Preprocessing.
            IJ.run("Duplicate...", "title=gb duplicate stack")
            # Smooth.
            IJ.run("Gaussian Blur...", "sigma=2 stack")
            # Top-hat.
            IJ.run("3D Fast Filters", "filter=TopHat radius_x_pix=10 radius_y_pix=10 radius_z_pix=0 Nb_cpus=4")
            IJ.getImage().setTitle("gb_th")

            # Threshold.
            IJ.run("Duplicate...", "title=bw duplicate stack")
            # Maybe a global threshold now, because there is already a top-hat?
#IJ.run("Auto Threshold", "method=Default white stack use_stack_histogram"); IJ.run("Auto Local Threshold", "method=Niblack radius=40 parameter_1=3 parameter_2=0 white stack"); # segment particles #IJ.getImage().setRoi(roi_x, roi_y, roi_w, roi_h) IJ.run("Set Measurements...", " mean min integrated center stack redirect=gb_th decimal=2"); IJ.run("Analyze Particles...", "size=10-10000 pixel circularity=0.00-1.00 show=Masks display exclude clear stack"); IJ.getImage().setTitle("particles") # measure particles rt = Analyzer.getResultsTable() # todo: add the particles that are actually analyzed (size filter, see above) # combine for documentation IJ.run("Combine...", "stack1=raw stack2=bw"); IJ.getImage().setTitle("combine_raw_bw") IJ.run("Combine...", "stack1=combine_raw_bw stack2=particles"); IJ.getImage().setTitle("combine_raw_bw_particles") impFRAPandSEG = IJ.getImage() IJ.saveAs(impFRAPandSEG, "Tiff", pathFRAPandSEG) # extract intensity informations nb = [0 for i in range(nt)] nc = [0 for i in range(nt)] ib = [0 for i in range(nt)] ic = [0 for i in range(nt)] fb = [0 for i in range(nt)] fc = [0 for i in range(nt)] t = [i*dt for i in range(nt)] if(rt.getColumnIndex("Slice")==-1): state = "no particles at all" else: Slice = rt.getColumn(rt.getColumnIndex("Slice")) Mean = rt.getColumn(rt.getColumnIndex("Mean")) Max = rt.getColumn(rt.getColumnIndex("Max")) X = rt.getColumn(rt.getColumnIndex("XM")) Y = rt.getColumn(rt.getColumnIndex("YM")) IntDen = rt.getColumn(rt.getColumnIndex("IntDen")) for i in range(len(Slice)): # inside bleach roi?! it = int(Slice[i])-1 # todo: maybe use a dictionary instead (one can remove items and have differnt t if ( (X[i]>roi_x) & (X[i]<roi_x2) & (Y[i]>roi_y) & (Y[i]<roi_y2) ): nb[it] = nb[it]+1 ib[it] = ib[it]+IntDen[i] # the IntDen copes best with in and out of focus motions as well as shape changes fb[it] = fb[it]+Max[i] else: nc[it] = nc[it]+1 ic[it] = ic[it]+IntDen[i] fc[it] = fc[it]+Max[i] # compute mean values per particle for i in range(len(t)): if nb[i]>0: fb[i] = fb[i]/ib[i] ib[i] = ib[i]/nb[i] if nc[i]>0: fc[i] = fc[i]/ic[i] ic[i] = ic[i]/nc[i] nb1 = [(0,1)[i==1] for i in nb] iShortlyAfter = iStart+10 if( sum(nb1[0:iStart]) < 0.5*iStart ): state = "not enough pre-bleach measurements" elif( sum(nb1[iStart:iShortlyAfter]) < 0.5*10 ): state = "not enough short term measurements" elif( sum(nb1[iShortlyAfter+1:len(nb1)-1]) < 0.5*(len(nb1)-(iShortlyAfter+1)) ): state = "not enough long term measurements" plot_size_x = 500 plot_size_y = 500 # plot number of particles plotParticles = Plot( "Particles", "time", "number of particles", t, nb ) plotParticles.setFrameSize(plot_size_x,plot_size_y) plotParticles.setSize(plot_size_x,plot_size_y) plotParticles.setLimits(min(t),max(t),0,1.2*max(max(nb),max(nc))) plotParticles.addPoints( t, nb, 3 ) plotParticles.addPoints( t, nc, 4 ) plotParticles.show() # plot raw intensities plotIntensities = Plot( "Intensities", "time", "gray values in particles", t, ib ) plotIntensities.setFrameSize(plot_size_x,plot_size_y) plotIntensities.setSize(plot_size_x,plot_size_y) plotIntensities.setLimits(min(t),max(t),0,1.2*max(max(ic),max(ib))) plotIntensities.addPoints( t, ib, 3 ) plotIntensities.addPoints( t, ic, 4 ) #plotIntensities.show() # plot sharpness plotSharpness = Plot( "Sharpness", "time", "Max/IntDen", t, fb ) plotSharpness.setFrameSize(plot_size_x,plot_size_y) plotSharpness.setSize(plot_size_x,plot_size_y) plotSharpness.setLimits(min(t),max(t),0,1.2*max(max(fc),max(fb))) plotSharpness.addPoints( t, fb, 3 ) 
            plotSharpness.addPoints(t, fc, 4)
            # plotSharpness.show()

            # FITTING
            state = "ok"

            # Extract all time points relevant to the fitting.
            xTmp = []
            yTmp = []
            # print len(t)
            # print range(iStart, len(t))
            ipb = ib[0:iStart - 1]
            yTmpNorm = max(1, sum(ipb) / len(ipb))  # in order to avoid division by zero
            # print ib[iStart - 1]
            for i in range(iStart, len(t)):
                if nb1[i] == 1:  # only time-points with 1 particle
                    xTmp.append(float(t[i] - t[iStart]))
                    yTmp.append(float(ib[i] / yTmpNorm))

            # Do the fitting.
            # todo: how to add initial guesses??
            imFrac = 0
            tau = 0
            xFit = []
            yFit = []
            if len(yTmp) > 20:
                cf = CurveFitter(xTmp, yTmp)
                cf.doFit(cf.EXP_RECOVERY)
                print cf.getFormula()
                p = cf.getParams()
                for i in p:
                    print i
                imFrac = (1 - p[2] - p[0])
                tau = (1 / p[1])
                for i in range(len(xTmp)):
                    xFit.append(float(xTmp[i]))
                    yFit.append(float(cf.f(cf.getParams(), xTmp[i])))
            else:
                state = "not enough data points for fitting"

            # Shift the fitting curves back to the original bleaching time point.
            # for i in range(len(xTmp)):
            #     xFit[i] = xFit[i] + t[iStart]
            #     xTmp[i] = xTmp[i] + t[iStart]

            # Plot the fit.
            plotFit = Plot("Fitting", "time after bleach", "normalised intensity of bleached particle", xFit, yFit)
            plotFit.setFrameSize(plot_size_x, plot_size_y)
            plotFit.setSize(plot_size_x, plot_size_y)
            if len(xTmp) > 0:
                plotFit.setLimits(min(xTmp), max(xTmp), 0, 1.2 * max(yTmp))
                plotFit.addPoints(xTmp, yTmp, 3)
            # plotFit.addPoints(xFit, yFit, 4)
            plotFit.addLabel(0.1, 0.95, "imm_frac=%.2f tau[s]=%.2f" % (imFrac, tau))
            plotFit.addLabel(0.1, 0.9, state)

            # Show the plots.
            IJ.run("Close all forced", "")
            plotIntensities.show()
            plotParticles.show()
            plotSharpness.show()
            plotFit.show()

            # Make one figure from the plots.
            IJ.run("Images to Stack", "name=Stack title=[] use")
            IJ.run("Make Montage...", "columns=4 rows=1 scale=1 first=1 last=4 increment=1 border=0 font=12")
            imp = IJ.getImage()
            print "saving image: " + os.path.join(destDir, filename + "--IJ_graphs.png")
            dest = os.path.join(destDir, filename + "--IJ_graphs.png")
            IJ.saveAs(imp, "PNG", dest)

            # Write text files.
            dest = os.path.join(destDir, filename + "--intensBleach.csv")
            writeXYfile(t, ib, dest, ",")
            dest = os.path.join(destDir, filename + "--intensCtrl.csv")
            writeXYfile(t, ic, dest, ",")
            dest = os.path.join(destDir, filename + "--numParticlesBleach.csv")
            writeXYfile(t, nb, dest, ",")
            dest = os.path.join(destDir, filename + "--numParticlesCtrl.csv")
            writeXYfile(t, nc, dest, ",")

            IJ.run("Close all forced", "")
    return
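# writeXYfile() is a helper defined elsewhere in this script. As an assumption (not
# the original implementation), a minimal sketch that writes one x/y pair per line
# with the given separator:
def write_xy_file_sketch(x, y, path, sep):
    out = open(path, 'w')
    for xi, yi in zip(x, y):
        out.write(str(xi) + sep + str(yi) + "\n")
    out.close()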
IJ.log(fitter.getResultString())

# Overlay fit curve, with oversampling (for plot).
xfit = [(t / 10.0 + bleach_frame) * frame_interval for t in range(10 * len(xtofit))]
yfit = []
for xt in xfit:
    yfit.append(fitter.f(fitter.getParams(), xt - xfit[0]))

plot = Plot("Normalized FRAP curve for " + current_imp.getTitle(),
            "Time (" + time_units + ')', "NU", [], [])
plot.setLimits(0, max(x), 0, 1.5)
plot.setLineWidth(2)
plot.setColor(Color.BLACK)
plot.addPoints(x, y, Plot.LINE)
plot.addPoints(x, y, PlotWindow.X)
plot.setColor(Color.RED)
plot.addPoints(xfit, yfit, Plot.LINE)
plot.setColor(Color.black)
plot_window = plot.show()

# Output FRAP parameters.
thalf = math.log(2) / param_values[1]
mobile_fraction = param_values[0]
str1 = ('Half-recovery time = %.2f ' + time_units) % thalf
    intensities.append(mean)

IJ.log('For image ' + current_imp.getTitle())
IJ.log('Time interval is ' + str(frame_interval) + ' ' + time_units)

# Build plot.
x = [i * frame_interval for i in range(n_slices)]
y = intensities
plot = Plot("Background curve " + current_imp.getTitle(),
            "Time (" + time_units + ')', "NU", [], [])
plot.setLimits(0, max(x), 0, max(y))
plot.setLineWidth(2)
plot.setColor(Color.BLACK)
plot.addPoints(x, y, Plot.LINE)
plot.addPoints(x, y, PlotWindow.X)
plot.setColor(Color.black)
plot_window = plot.show()

###############################
# Save data as a json file
###############################

# Ask for filename.
savename_temp = os.path.splitext(stack_title)[0] + '_cell_XX'
save_file = SaveDialog('Please choose a location to save results', file_dir, savename_temp, '.json')
def scatter_plot(title, x, y, x_lab, y_lab):
    plot = Plot(title, x_lab, y_lab, [], [])
    plot.addPoints(x, y, Plot.CIRCLE)
    # plot.setLimits(min(x), max(x), min(y), max(y))
    plot.show()
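# Example call with hypothetical data (any two equal-length numeric lists work):
scatter_plot("Area vs. mean intensity", [1, 2, 3, 4], [10.2, 8.7, 12.1, 9.9],
             "Area (px^2)", "Mean intensity")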
# Create example data arrays.
xa = [1., 2., 3., 4.]
ya = [3., 3.5, 4., 4.5]

# Construct a CurveFitter instance.
cf = CurveFitter(xa, ya)

# Actual fitting.
# Fit models: see http://rsb.info.nih.gov/ij/developer/api/constant-values.html#ij.measure.CurveFitter.STRAIGHT_LINE
cf.doFit(CurveFitter.STRAIGHT_LINE)

# Print out the fitted parameters (intercept b and slope m).
b = cf.getParams()[0]
m = cf.getParams()[1]
strOut = str(b) + " : " + str(m)
IJ.log(strOut)

# Draw the data points and the fitted line.
xb = [0, 5]
yb = [b, 5 * m + b]
pl = Plot("Data", "x", "y")
pl.setLimits(0, 5, 0, 5)
pl.addPoints(xa, ya, Plot.CIRCLE)
pl.drawLine(xb[0], yb[0], xb[1], yb[1])
pl.show()
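# Optionally, the goodness of fit can be logged as well; getRSquared() is part of
# ij.measure.CurveFitter:
IJ.log("R^2 = " + str(cf.getRSquared()))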