Example #1
def batchAnalysis(analysis_exe,
                  input_directory,
                  output_directory,
                  multi_xml,
                  max_processes=2):
    minimum_length = 100

    dax_files = glob.glob(input_directory + "*.dax")

    # setup process queue
    process_count = 0
    results = Queue.Queue()

    # start processes
    procs = []
    for i, file in enumerate(dax_files):

        print "Found:", file

        movie_obj = datareader.inferReader(file)
        if (movie_obj.filmSize()[2] > minimum_length):
            basename = os.path.basename(file)
            mlistname = output_directory + "/" + basename[:-4] + "_mlist.bin"
            print "  ->", mlistname

            try:
                # Wait for a process to stop before starting
                # the next one if we are at the limit.
                if (process_count >= max_processes):
                    description, rc = results.get()
                    print description
                    process_count -= 1
                proc = subprocess.Popen(
                    ['python', analysis_exe, file, mlistname, multi_xml])
                procs.append(proc)
                thread.start_new_thread(
                    process_waiter, (proc, "Finished: " + basename, results))
                process_count += 1

            except KeyboardInterrupt:
                for proc in procs:
                    # poll() returns None while the process is still running
                    # (note that signal.CTRL_C_EVENT is Windows-only).
                    if proc.poll() is None:
                        proc.send_signal(signal.CTRL_C_EVENT)

    # wait until all the processes finish
    try:
        while (process_count > 0):
            description, rc = results.get()
            print description
            process_count -= 1

    except KeyboardInterrupt:
        for proc in procs:
            # poll() returns None while the process is still running.
            if proc.poll() is None:
                proc.send_signal(signal.CTRL_C_EVENT)
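
For reference, a minimal invocation sketch; every name below is a placeholder rather than part of the original example. Note that input_directory must end in a path separator, because it is concatenated directly with "*.dax" in the glob above.

batchAnalysis("mufit_analysis.py",          # placeholder analysis script
              "/data/movies/",              # trailing "/" is required by the glob
              "/data/results",
              "analysis_settings.xml",      # placeholder parameters file
              max_processes = 4)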
Example #3
def loadMovie(self):
    movie_filename = str(QtGui.QFileDialog.getOpenFileName(self,
                                                           "Load Movie",
                                                           self.directory,
                                                           "*.dax *.spe *.tif"))
    if movie_filename:
        self.directory = os.path.dirname(movie_filename)
        self.movie_file = datareader.inferReader(movie_filename)
        [self.film_x, self.film_y, self.film_l] = self.movie_file.filmSize()
        self.ui.fileLabel.setText(movie_filename)
        self.cur_frame = 0
        self.multi_list = False
        self.incCurFrame(0)
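
The example leans on the same two datareader calls that recur throughout this page. A minimal standalone sketch, with a placeholder file name (inferReader() appears to pick the reader from the file extension):

import sa_library.datareader as datareader

movie = datareader.inferReader("movie.dax")    # placeholder name
[width, height, length] = movie.filmSize()
frame = movie.loadAFrame(0)                    # one frame as a numpy array
print "film size:", width, "x", height, ",", length, "frames"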
# setup process queue
results = Queue.Queue()
def process_waiter(popen, description, que):
    # Block until the subprocess exits, then post its description and
    # return code to the results queue.
    try:
        popen.wait()
    finally:
        que.put((description, popen.returncode))
process_count = 0

# start processes
procs = []
for i, file in enumerate(dax_files):

    print "Found:", file

    movie_obj = datareader.inferReader(file)
    if (movie_obj.filmSize()[2] > minimum_length):
        basename = os.path.basename(file)
        mlistname = output_directory + "/" + basename[:-4] + "_mlist.bin"
        print "  ->", mlistname

        try:
            # Wait for a process to stop before starting
            # the next one if we are at the limit.
            if (process_count >= max_processes):
                description, rc = results.get()
                print description
                process_count -= 1
            proc = subprocess.Popen(['python', mufit_exe, file, mlistname, multi_xml])
            procs.append(proc)
            thread.start_new_thread(process_waiter, (proc, "Finished: " + basename, results))
Example #7
#
if __name__ == "__main__":

    import numpy

    import camera.frame as frame

    # This file is available in the ZhuangLab storm-analysis project on github.
    import sa_library.datareader as datareader

    if (len(sys.argv) != 4):
        print "usage: <settings> <movie_in> <png_out>"
        exit()

    # Open movie & get size.
    data_file = datareader.inferReader(sys.argv[2])
    [width, height, length] = data_file.filmSize()

    # Start spotCounter as a stand-alone application.
    app = QtGui.QApplication(sys.argv)
    parameters = params.halParameters(sys.argv[1])
    parameters.set("setup_name", "offline")

    parameters.set("camera1.x_pixels", width)
    parameters.set("camera1.y_pixels", height)
    parameters.set("camera1.x_bin", 1)
    parameters.set("camera1.y_bin", 1)

    spotCounter = SingleSpotCounter(None, parameters)
    spotCounter.newParameters(parameters)
Example #8
#

import numpy
import sys

import sa_library.datareader as datareader
import sa_library.daxwriter as daxwriter

import scmos_utilities_c

if (len(sys.argv) != 6):
    print "usage: <input_dax> <output_dax> <calib> <sigma> <frames>"
    exit()

# Open the input file.
in_file = datareader.inferReader(sys.argv[1])
f_len = in_file.filmSize()[2]
if (int(sys.argv[5]) > 0) and (int(sys.argv[5]) < f_len):
    f_len = int(sys.argv[5])

# Open the output file.
out_file = daxwriter.DaxWriter(sys.argv[2], 0, 0)

# Load camera calibration (sliced as appropriate
# for the ROI) and create the smoother class.
[offset, variance, gain] = numpy.load(sys.argv[3])
smoother = scmos_utilities_c.Smoother(offset, variance, gain)

# Load images, smooth & output.
sigma_psf = int(round(float(sys.argv[4])))
for i in range(f_len):
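    # (The excerpt ends at the loop header above; a hedged guess at the body
    # for illustration: read each frame and write it back out. addFrame() is
    # an assumption here, and the real script presumably applies the Smoother
    # to each frame before writing.)
    frame = in_file.loadAFrame(i)
    out_file.addFrame(frame)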
Example #9
def peakFinding(find_peaks, movie_file, mlist_file, parameters):

    # open files for input & output
    movie_data = datareader.inferReader(movie_file)
    [movie_x, movie_y, movie_l] = movie_data.filmSize()

    # if the i3 file already exists, read it in,
    # write it out & start the analysis from the
    # end.
    total_peaks = 0
    if (os.path.exists(mlist_file)):
        print "Found", mlist_file
        i3data_in = readinsight3.loadI3File(mlist_file)
        try:
            curf = int(numpy.max(i3data_in['fr']))
        except ValueError:
            curf = 0
        print " Starting analysis at frame:", curf
        i3data = writeinsight3.I3Writer(mlist_file)
        if (curf > 0):
            i3data.addMolecules(i3data_in)
            total_peaks = i3data_in['x'].size
    else:
        curf = 0
        i3data = writeinsight3.I3Writer(mlist_file)

    # process parameters
    if hasattr(parameters, "start_frame"):
        if (parameters.start_frame>=curf) and (parameters.start_frame<movie_l):
            curf = parameters.start_frame

    if hasattr(parameters, "max_frame"):
        if (parameters.max_frame>0) and (parameters.max_frame<movie_l):
            movie_l = parameters.max_frame

    # analyze the movie
    # catch keyboard interrupts & "gracefully" exit.
    try:
        while(curf<movie_l):
            #for j in range(l):

            # Set up the analysis.
            image = movie_data.loadAFrame(curf) - parameters.baseline
            mask = (image < 1.0)
            if (numpy.sum(mask) > 0):
                print " Removing negative values in frame", curf
                image[mask] = 1.0

            # Find and fit the peaks.
            [peaks, residual] = find_peaks.analyzeImage(image)

            # Save the peaks, if any were found (analyzeImage() returns
            # a numpy array on success).
            if isinstance(peaks, numpy.ndarray):
                # remove unconverged peaks
                peaks = find_peaks.getConvergedPeaks(peaks)

                # save results; molecule list frame numbers are 1-based.
                inverted = (parameters.orientation == "inverted")
                i3data.addMultiFitMolecules(peaks, movie_x, movie_y, curf + 1,
                                            parameters.pixel_size,
                                            inverted = inverted)

                total_peaks += peaks.shape[0]
                print "Frame:", curf, peaks.shape[0], total_peaks
            else:
                print "Frame:", curf, 0, total_peaks
            curf += 1

        print ""
        i3data.close()
        find_peaks.cleanUp()
        return 0

    except KeyboardInterrupt:
        print "Analysis stopped."
        i3data.close()
        find_peaks.cleanUp()
        return 1
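
The restart logic at the top of peakFinding() is the part worth isolating: an existing molecule list is scanned for its highest frame number so the analysis can resume where it left off. A standalone sketch of just that step, with a placeholder file name:

import numpy
import sa_library.readinsight3 as readinsight3

# "movie_mlist.bin" is a placeholder name.
i3data_in = readinsight3.loadI3File("movie_mlist.bin")
try:
    # 'fr' holds 1-based frame numbers.
    curf = int(numpy.max(i3data_in['fr']))
except ValueError:
    # numpy.max raises ValueError on an empty molecule list.
    curf = 0
print "resuming at frame:", curf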
Example #11
import sa_library.ia_utilities_c as util_c
import sa_library.datareader as datareader
import sa_library.readinsight3 as readinsight3

if (len(sys.argv) != 4):
    print "usage: homotopy_psf <dax_file, input> <bin_file, input> <npy_file, output>"
    exit()

# Minimum number of peaks to calculate the PSF from.
min_peaks = 500

# Half width of the aoi size in pixels.
aoi_size = 8

# Load dax file and corresponding molecule list file.
dax_data = datareader.inferReader(sys.argv[1])
i3_data = readinsight3.loadI3File(sys.argv[2])

# Go through the frames identifying good peaks and adding them
# to the average psf
average_psf = numpy.zeros((4 * aoi_size, 4 * aoi_size))
curf = 1
peaks_used = 0
total = 0.0
[dax_x, dax_y, dax_l] = dax_data.filmSize()
while (curf < dax_l) and (peaks_used < min_peaks):

    # Select localizations in current frame & not near the edges.
    mask = (i3_data['fr'] == curf) & (i3_data['x'] > aoi_size) & (
        i3_data['x'] < (dax_y - aoi_size - 1)) & (i3_data['y'] > aoi_size) & (
            i3_data['y'] < (dax_x - aoi_size - 1))
if not os.path.exists(acq_folder + "avg561ffc.tif"):
    os.mkdir(acq_folder + "dist_corr//")

    # determine the 99th percentile of intensity values for all pixels in the conventional images

    # find all the Visconv movies
    Visconv_files = glob.glob(acq_folder + 'Visconv_' + '*.dax')
    if len(Visconv_files) > 0:
        cnt = 0
        # pad matrices
        aperc_v488 = [0] * len(Visconv_files)
        aperc_v561 = [0] * len(Visconv_files)
        aperc_v647 = [0] * len(Visconv_files)
        for file in Visconv_files:
            # read 647 image intensities and load to matrix
            dax_file = daxspereader.inferReader(file)
            image = dax_file.loadAFrame(6).astype(numpy.uint16)
            aperc_v647[cnt] = numpy.percentile(image, 99.999)
            # read 561 image intensities and load to matrix
            image = dax_file.loadAFrame(11).astype(numpy.uint16)
            aperc_v561[cnt] = numpy.percentile(image, 99.999)
            # read 488 image intensities and load to matrix
            image = dax_file.loadAFrame(19).astype(numpy.uint16)
            aperc_v488[cnt] = numpy.percentile(image, 99.999)

            cnt = cnt + 1

    # find all the IRconv movies
    IRconv_files = glob.glob(acq_folder + 'IRconv_' + '*.dax')
    if len(IRconv_files) > 0:
        cnt = 0
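
A quick, self-contained check of the percentile call used in this excerpt; at 99.999 essentially all pixel values fall below the returned number:

import numpy

image = numpy.arange(100000).reshape(250, 400)    # synthetic 250 x 400 "frame"
print numpy.percentile(image, 99.999)             # ~99998.0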
Example #14
        """
        return image - self.estimateBG(image, iterations, threshold, wavelet_level)


if (__name__ == "__main__"):

    import sys

    import sa_library.datareader as datareader
    import sa_library.daxwriter as daxwriter

    if (len(sys.argv) < 6):
        print "usage <movie> <wavelet_type> <wavelet_level> <iterations> <threshold> <baseline (optional, 100 default)>"
        exit()

    input_movie = datareader.inferReader(sys.argv[1])
    output_dax = daxwriter.DaxWriter("subtracted.dax", 0, 0)

    iterations = int(sys.argv[4])
    threshold = float(sys.argv[5])
    wavelet_level = int(sys.argv[3])    

    offset = 100.0
    if (len(sys.argv) == 7):
        offset = float(sys.argv[6])

    wbgr = WaveletBGR(wavelet_type=sys.argv[2])

    for i in range(input_movie.filmSize()[2]):

        if ((i % 10) == 0):
spot_list.pixel_size = pix_to_nm
spot_list.nr_spots = localization_number

spot_list.nr_channels = len(channels)

# These are always the same.
spot_list.nr_slices = 1
spot_list.nr_pos = 1
spot_list.fit_mode = 1
spot_list.location_units = 0
spot_list.intensity_units = 0
spot_list.is_track = False

# If a dax file is provided, get the film size.
if os.path.exists(sys.argv[1]):
    data_reader = datareader.inferReader(sys.argv[1])
    [x, y, l] = data_reader.filmSize()

    spot_list.nr_pixels_x = x
    spot_list.nr_pixels_y = y
    spot_list.nr_frames = l

# The file starts with a 4 byte magic number followed by this 8 byte
# offset field, hence the "- 12".
spot_list_offset = tsf_file.tell() - 12

out = spot_list.SerializeToString()
out = encoder._VarintBytes(len(out)) + out
tsf_file.write(out)

# Rewind to just past the 4 byte magic number and record the offset of
# the SpotList message.
tsf_file.seek(4)
setV(tsf_file, ">Q", spot_list_offset)