Example #1
File: worker.py  Project: zbsun/spts
    def _work_denoise(self, work_package, tmp_package, out_package):
        i = work_package["i"]
        O = OutputCollector()
        image = tmp_package["2_process"]["image"]
        # Denoise
        log_debug(logger, "(%i/%i) Denoise image" % (i + 1, self.N_arr))
        self._update_denoiser()
        image_denoised = self.denoiser.denoise_image(image, full_output=True)
        O.add("image_denoised",
              np.asarray(image_denoised, dtype=np.int16),
              4,
              pipeline=True)
        success = True
        O.add("success", success, 0, pipeline=True)
        out_package["3_denoise"] = O.get_dict(
            self.conf["general"]["output_level"], self.pipeline_mode)
        tmp_package["3_denoise"] = O.get_dict(5, True)
        return out_package, tmp_package
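Every worker step funnels its results through OutputCollector, which is not shown in these examples. The following minimal sketch is a hypothetical stand-in with semantics inferred from the call sites (add() takes a name, a value, a minimum output level, and a pipeline flag; get_dict(output_level, pipeline_mode) filters accordingly); it is not the real spts API:

class OutputCollectorSketch:
    """Hypothetical stand-in for spts' OutputCollector (inferred from usage)."""

    def __init__(self):
        self._entries = {}

    def add(self, name, value, level, pipeline=False):
        # Register a value with the minimum output level at which it should
        # be exported and a flag marking whether downstream stages need it.
        self._entries[name] = (value, level, pipeline)

    def get_dict(self, output_level, pipeline_mode):
        # Keep entries whose level clears the threshold; in pipeline mode,
        # keep only the entries flagged for the pipeline.
        return {name: value
                for name, (value, level, pipeline) in self._entries.items()
                if level <= output_level and (pipeline or not pipeline_mode)}

Under these assumed semantics, get_dict(5, True) collects everything a later stage might read from tmp_package, while get_dict(output_level, pipeline_mode) selects what is exported in out_package.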
Example #2
File: worker.py  Project: zbsun/spts
    def work(self,
             work_package,
             tmp_package=None,
             out_package=None,
             target="analyse"):

        W = work_package

        # Update index
        i = W["i"]
        log_debug(logger, "(%i) Start work" % i)
        if self.pipeline_mode:
            self.update()

        if not self._is_valid_i(i):
            logger.warning(
                "Invalid index. Probably we reached the end of the processing range (i=%i, N=%i, N_arr=%i)"
                % (i, self.N, self.N_arr))
            return None

        # Reuse cached results when the temporary package already belongs to
        # this index (seeding the output from it); otherwise start fresh.
        if tmp_package is None or W["i"] != tmp_package["i"]:
            tmp_package = {"i": i}
        else:
            out_package = dict(tmp_package)
        if out_package is None or W["i"] != out_package["i"]:
            out_package = {"i": i}

        tmp = [("1_raw", self._work_raw), ("2_process", self._work_process),
               ("3_denoise", self._work_denoise),
               ("4_threshold", self._work_threshold),
               ("5_detect", self._work_detect),
               ("6_analyse", self._work_analyse)]

        for work_name, work_func in tmp:
            if not work_name in tmp_package:
                log_debug(logger, "(%i) Starting %s" % (i, work_name))
                out_package, tmp_package = work_func(work_package, tmp_package,
                                                     out_package)
                log_debug(logger, "(%i) Done with %s" % (i, work_name))
            if work_name.endswith(target):
                log_debug(logger, "(%i) Reached target %s" % (i, work_name))
                return out_package
        log_warning(logger, "(%i) Incorrect target defined (%s)" % (i, target))
        return out_package
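Because work() walks the ordered stage list and returns as soon as a stage name ends with target, callers can stop the chain early. A hedged usage sketch, assuming W is a configured spts.worker.Worker:

w = W.get_work()                           # next work package, e.g. {"i": 0}
if w is not None:
    partial = W.work(w, target="denoise")  # runs 1_raw, 2_process, 3_denoise
    full = W.work(w, target="analyse")     # default target: all six stages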
Example #3
File: detect.py  Project: Toonggg/spts
def find_particles(image_scored,
                   image_thresholded,
                   min_dist,
                   n_particles_max,
                   peak_centering="center_of_mass"):

    n_lit = image_thresholded.sum()
    log_debug(logger, "%i pixels above threshold" % n_lit)

    success = False
    return_default = success, [], None, None, None, None, None, None, None, None

    if n_lit == 0:
        return return_default

    else:
        log_debug(logger, "Label image")
        # scipy.ndimage.measurements is deprecated; use scipy.ndimage directly
        labels, n_labels = scipy.ndimage.label(image_thresholded)
        i_labels = range(1, n_labels + 1)

        if n_labels > n_particles_max:
            log_info(
                logger,
                "%i labels - (frame overexposed? too many particles?), skipping analysis"
                % n_labels)
            return return_default

        # Peak score: maximum scored value within each labeled region
        V = [image_scored[labels == i_label].max() for i_label in i_labels]

        # Flatten coordinates, labels and scores over the thresholded pixels
        nx = image_thresholded.shape[1]
        ny = image_thresholded.shape[0]
        x, y = np.meshgrid(np.arange(nx), np.arange(ny))
        x = x[image_thresholded]
        y = y[image_thresholded]
        lab = labels[image_thresholded]
        v = image_scored[image_thresholded]

        if peak_centering == "center_of_mass":
            log_debug(logger, "Determine centers of mass")
            com = scipy.ndimage.center_of_mass(
                image_thresholded, labels, i_labels)
            X = [com_x for com_y, com_x in com]
            Y = [com_y for com_y, com_x in com]

        elif peak_centering == "center_to_max":
            log_debug(logger, "Determine maximum positions")
            X = []
            Y = []
            for i_label in i_labels:
                i_max = (v * (lab == i_label)).argmax()
                X.append(x[i_max])
                Y.append(y[i_max])
        else:
            log_and_raise_error(
                logger, "%s is not a valid argument for peak_centering!" %
                peak_centering)

        # Dislocation: distance between each peak's maximum and its center
        dislocation = []
        for i_label, xc, yc in zip(i_labels, X, Y):
            i_max = (v * (lab == i_label)).argmax()
            dislocation.append(np.sqrt((x[i_max] - xc)**2 +
                                       (y[i_max] - yc)**2))

        merged = [False] * len(i_labels)  # np.bool was removed in NumPy 1.24

        # Merge too close peaks
        log_debug(logger, "Merge too close points")
        i_labels, labels, X, Y, V, merged, dists_closest_neighbor = merge_close_points(
            i_labels, labels, X, Y, V, merged, min_dist)
        log_debug(logger, "Clean up labels")
        i_labels, labels = clean_up_labels(i_labels, labels)
        n_labels = len(i_labels)

        log_debug(logger, "Measure size of each peak")
        areas = measure_areas(i_labels, labels)

        success = True
        areas = np.asarray(areas)
        X = np.asarray(X)
        Y = np.asarray(Y)
        dislocation = np.asarray(dislocation)
        V = np.asarray(V)
        merged = np.asarray(merged)
        dists_closest_neighbor = np.asarray(dists_closest_neighbor)

        return success, i_labels, labels, areas, X, Y, V, merged, dists_closest_neighbor, dislocation
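find_particles builds on two scipy.ndimage calls: label() segments the thresholded mask into connected components, and center_of_mass() locates each one. A self-contained toy run of just those two steps:

import numpy as np
import scipy.ndimage

image = np.zeros((8, 8))
image[1:3, 1:3] = 5.0   # first blob
image[5:7, 4:7] = 3.0   # second blob
image_thresholded = image > 1.0

# Segment the mask into connected components labeled 1..n_labels
labels, n_labels = scipy.ndimage.label(image_thresholded)
i_labels = range(1, n_labels + 1)

# Center of mass of each component, returned as (row, col) = (y, x)
com = scipy.ndimage.center_of_mass(image_thresholded, labels, i_labels)
print(n_labels)   # 2
print(com)        # [(1.5, 1.5), (5.5, 5.0)]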
Example #4
File: worker.py  Project: zbsun/spts
    def _work_analyse(self, work_package, tmp_package, out_package):
        i = work_package["i"]
        O = OutputCollector()
        image_raw = tmp_package["1_raw"]["image_raw"]
        saturation_mask = tmp_package["1_raw"]["saturation_mask"]
        image = tmp_package["2_process"]["image"]
        n_labels = tmp_package["5_detect"]["n"]
        i_labels = tmp_package["5_detect"]["i_labels"]
        i_labels = i_labels[i_labels != -1]
        image_labels = tmp_package["5_detect"]["image_labels"]
        x = tmp_package["5_detect"]["x"]
        x = x[x != -1]
        y = tmp_package["5_detect"]["y"]
        y = y[y != -1]
        merged = tmp_package["5_detect"]["merged"]
        n_max = self.conf["detect"]["n_particles_max"]
        res = spts.analysis.analyse_particles(
            image=image,
            image_raw=image_raw,
            saturation_mask=saturation_mask,
            i_labels=i_labels,
            labels=image_labels,
            x=x,
            y=y,
            merged=merged,
            full_output=self.conf["general"]["output_level"] >= 3,
            n_particles_max=n_max,
            **self.conf["analyse"])
        success, peak_success, peak_sum, peak_mean, peak_median, peak_min, peak_max, peak_size, peak_saturated, peak_eccentricity, peak_circumference, masked_image, peak_thumbnails = res
        # Analyse image at particle positions
        log_debug(
            logger, "(%i/%i) Analyse image at %i particle positions" %
            (i + 1, self.N_arr, len(i_labels)))
        O.add("peak_success",
              uniform_particle_array(peak_success, n_max, bool, vinit=False),
              0)
        O.add("peak_sum",
              uniform_particle_array(peak_sum, n_max),
              0,
              pipeline=True)
        O.add("peak_mean", uniform_particle_array(peak_mean, n_max), 0)
        O.add("peak_median", uniform_particle_array(peak_median, n_max), 0)
        O.add("peak_min", uniform_particle_array(peak_min, n_max), 0)
        O.add("peak_max", uniform_particle_array(peak_max, n_max), 0)
        O.add("peak_size", uniform_particle_array(peak_size, n_max), 0)
        O.add("peak_eccentricity",
              uniform_particle_array(peak_eccentricity, n_max),
              0,
              pipeline=True)
        O.add("peak_circumference",
              uniform_particle_array(peak_circumference, n_max), 0)
        O.add("peak_saturated",
              uniform_particle_array(peak_saturated, n_max, np.int8, vinit=0),
              0)
        # Thumbnail window size; define it unconditionally so the fallback
        # branches below never hit an unbound variable.
        if self.conf["analyse"]["integration_mode"] == "windows":
            s = self.conf["analyse"]["window_size"]
        else:
            s = spts.analysis.THUMBNAILS_WINDOW_SIZE_DEFAULT
        if peak_thumbnails is not None and success:
            O.add("peak_thumbnails", np.asarray(peak_thumbnails,
                                                dtype=np.int32), 3)
        else:
            O.add("peak_thumbnails",
                  np.zeros(shape=(n_max, s, s), dtype=np.int32), 3)
        if masked_image is not None and success:
            O.add("masked_image",
                  np.asarray(masked_image, dtype=np.int32),
                  3,
                  pipeline=True)
        else:
            O.add("masked_image",
                  np.zeros(shape=image.shape, dtype=np.int32),
                  3,
                  pipeline=True)
        O.add("success", success, 0, pipeline=True)
        out_package["6_analyse"] = O.get_dict(
            self.conf["general"]["output_level"], self.pipeline_mode)
        tmp_package["6_analyse"] = O.get_dict(5, True)
        return out_package, tmp_package
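uniform_particle_array() is used throughout but never shown. From its call sites (a fixed n_max length, optional dtype and fill value, downstream filtering of -1 entries) it plausibly pads or truncates a per-particle sequence to a fixed-size array. A sketch under those assumptions, not the project's actual implementation:

import numpy as np

def uniform_particle_array(values, n_max, dtype=np.float64, vinit=-1):
    # Hypothetical reimplementation, inferred from usage in the examples:
    # fill with vinit, then copy at most n_max per-particle values in.
    out = np.full(n_max, vinit, dtype=dtype)
    n = min(len(values), n_max)
    if n > 0:
        out[:n] = np.asarray(values)[:n]
    return out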
Example #5
File: worker.py  Project: zbsun/spts
    def _work_detect(self, work_package, tmp_package, out_package):
        i = work_package["i"]
        O = OutputCollector()
        image_denoised = tmp_package["3_denoise"]["image_denoised"]
        image_thresholded = tmp_package["4_threshold"]["image_thresholded"]
        # Detect particles
        log_debug(logger, "(%i/%i) Detect particles" % (i + 1, self.N_arr))
        n_max = self.conf["detect"]["n_particles_max"]
        success, i_labels, image_labels, area, x, y, score, merged, dist_neighbor, dislocation = spts.detect.find_particles(
            image_denoised,
            image_thresholded,
            self.conf["detect"]["min_dist"],
            n_max,
            peak_centering=self.conf["detect"]["peak_centering"])
        n_labels = len(i_labels)
        success = success and (n_labels > 0) and (n_labels <= n_max)
        log_info(logger,
                 "(%i/%i) Found %i particles" % (i + 1, self.N_arr, n_labels))
        if success:
            O.add("n", n_labels, 0, pipeline=True)
            O.add("x", uniform_particle_array(x, n_max), 0, pipeline=True)
            O.add("y", uniform_particle_array(y, n_max), 0, pipeline=True)
            O.add("peak_score",
                  uniform_particle_array(score, n_max),
                  0,
                  pipeline=False)
            O.add("area", uniform_particle_array(area, n_max, np.int32), 0)
            O.add("merged",
                  uniform_particle_array(merged, n_max, np.int16),
                  0,
                  pipeline=True)
            O.add("dist_neighbor",
                  uniform_particle_array(dist_neighbor, n_max), 0)
            O.add("i_labels",
                  uniform_particle_array(i_labels, n_max),
                  5,
                  pipeline=True)
            O.add("image_labels", image_labels, 5, pipeline=True)
            O.add("dislocation",
                  uniform_particle_array(dislocation, n_max),
                  0,
                  pipeline=False)
        else:
            O.add("n", 0, 0, pipeline=True)
            O.add("x", uniform_particle_array([], n_max), 0, pipeline=True)
            O.add("y", uniform_particle_array([], n_max), 0, pipeline=True)
            O.add("peak_score",
                  uniform_particle_array([], n_max),
                  0,
                  pipeline=False)
            O.add("area", uniform_particle_array([], n_max, np.int32), 0)
            O.add("merged",
                  uniform_particle_array([], n_max, np.int16),
                  0,
                  pipeline=True)
            O.add("dist_neighbor", uniform_particle_array([], n_max), 0)
            O.add("i_labels",
                  uniform_particle_array([], n_max),
                  5,
                  pipeline=True)
            O.add("image_labels",
                  np.zeros_like(image_thresholded),
                  5,
                  pipeline=True)
            O.add("dislocation",
                  uniform_particle_array([], n_max),
                  0,
                  pipeline=False)
        O.add("success", success, 0, pipeline=True)
        out_package["5_detect"] = O.get_dict(
            self.conf["general"]["output_level"], self.pipeline_mode)
        tmp_package["5_detect"] = O.get_dict(5, True)
        return out_package, tmp_package
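Note that both branches of _work_detect emit the same keys with the same fixed shapes, so every frame produces an identically shaped record; that stable per-frame schema is what lets the HDF5 writer in Example #6 stack one slice per frame. For illustration, reusing the hypothetical uniform_particle_array sketch above, an empty frame looks like:

n_max = 10
empty_detect = {
    "n": 0,
    "x": uniform_particle_array([], n_max),
    "y": uniform_particle_array([], n_max),
    "success": False,
}
print(empty_detect["x"].shape)   # (10,) -- same shape with 0 or n_max hits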
Example #6
File: run_spts.py  Project: FilipeMaia/spts
        # Excerpt from main(); the enclosing condition selecting this MPI
        # branch is not shown in the snippet.
        is_worker = comm.rank > 0
        H = h5writer.H5WriterMPISW("./spts.cxi", comm=comm, chunksize=100, compression=None)
        if is_worker:
            W = spts.worker.Worker(conf, i0_offset=comm.rank-1, step_size=comm.size-1)
    else:
        is_worker = True
        H = h5writer.H5Writer("./spts.cxi")
        W = spts.worker.Worker(conf)

    if is_worker:
        if args.cores > 1:
            mulpro.mulpro(Nprocesses=args.cores-1, worker=W.work, getwork=W.get_work, logres=H.write_slice)
        else:
            while True:
                t0 = time.time()
                log_debug(logger, "Read work package (analysis)")
                w = W.get_work()
                if w is None:
                    log_debug(logger, "No more images to process")
                    break
                log_debug(logger, "Start work")
                out = W.work(w)
                t1 = time.time()
                t_work = t1-t0
                t0 = time.time()
                H.write_slice(out)
                t1 = time.time()
                t_write = t1-t0
                log_info(logger, "work %.2f sec / write %.2f sec" % (t_work, t_write))

    H.write_solo({'__version__': spts.__version__})
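The serial branch above is a plain get_work/work/write loop. A minimal runnable sketch of that pattern with stand-in objects (the real Worker and writer come from spts and h5writer; StubWorker here is hypothetical):

class StubWorker:
    def __init__(self, n):
        self._queue = list(range(n))

    def get_work(self):
        # Return the next work package, or None when the queue is exhausted.
        return {"i": self._queue.pop(0)} if self._queue else None

    def work(self, w):
        return {"i": w["i"], "result": w["i"] ** 2}

W = StubWorker(3)
results = []
while True:
    w = W.get_work()
    if w is None:
        break
    results.append(W.work(w))   # run_spts.py instead calls H.write_slice(...)
print(results)   # [{'i': 0, 'result': 0}, {'i': 1, 'result': 1}, {'i': 2, 'result': 4}]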