Example #1
0
 async def run_sd_udf(self, roi, stddev_udf, executor, cancel_id, job_is_cancelled):
     """Run the standard-deviation UDF over *roi* on this dataset.

     Drains the async result iterator so only the final (complete)
     partial result is kept, checks for cancellation, and returns
     ``(roi, consolidated_result)``.

     Raises JobCancelledError when *job_is_cancelled()* reports True.
     """
     runner = UDFRunner([stddev_udf])
     async for (latest,) in runner.run_for_dataset_async(
         self.dataset, executor, roi=roi, cancel_id=cancel_id
     ):
         # Only the most recent partial result matters; discard the rest.
         pass
     if job_is_cancelled():
         raise JobCancelledError()
     return roi, consolidate_result(latest)
Example #2
0
 def get_udf_results(self, udf_results, roi):
     """Consolidate raw std-dev UDF results and wrap the variance
     channel in an AnalysisResultSet holding a single log-scaled
     "intensity" result."""
     consolidated = std.consolidate_result(udf_results)
     var_data = consolidated['var'].data
     result = AnalysisResult(
         raw_data=var_data,
         visualized=visualize_simple(var_data, logarithmic=True),
         key="intensity",
         title="intensity",
         desc="SD of frames",
     )
     return AnalysisResultSet([result])
Example #3
0
 async def run_sd_udf(self, roi, stddev_udf, executor, cancel_id,
                      job_is_cancelled):
     """Run the standard-deviation UDF over *roi*, using the UDF runner
     class supplied by *executor*.

     Drains the async iterator (keeping only the final result), checks
     for cancellation, and returns ``(roi, consolidated_result)`` built
     from the first result buffer.

     Raises JobCancelledError when *job_is_cancelled()* reports True.
     """
     runner = executor.get_udf_runner()([stddev_udf])
     async for partial in runner.run_for_dataset_async(
         self.dataset, executor, roi=roi, cancel_id=cancel_id
     ):
         # Discard intermediate results; only the last one is used.
         pass
     if job_is_cancelled():
         raise JobCancelledError()
     return roi, consolidate_result(partial.buffers[0])
Example #4
0
 def get_udf_results(self, udf_results, roi):
     """Consolidate raw std-dev UDF results and return an
     AnalysisResultSet with the 'std' channel visualized twice:
     log-scaled and lin-scaled."""
     from libertem.viz import visualize_simple
     consolidated = std.consolidate_result(udf_results)
     std_data = consolidated['std']
     # (logarithmic, key, title, desc) for each rendered variant.
     variants = [
         (True, "intensity", "intensity [log]",
          "Standard deviation of frames log-scaled"),
         (False, "intensity_lin", "intensity [lin]",
          "Standard deviation of frames lin-scaled"),
     ]
     return AnalysisResultSet([
         AnalysisResult(
             raw_data=std_data,
             visualized=visualize_simple(std_data, logarithmic=log_scale),
             key=key,
             title=title,
             desc=desc,
         )
         for log_scale, key, title, desc in variants
     ])
Example #5
0
    async def controller(self, cancel_id, executor, job_is_cancelled,
                         send_results):
        """Run the full analysis pipeline and push the results.

        Steps: compute the per-pixel standard deviation over the dataset,
        restrict it to an annular region (if ring parameters are set),
        pick peak coordinates from the masked std map, build one sparse
        single-pixel mask per peak, apply those masks via ApplyMasksUDF,
        post-process the results off the event loop, and send them.

        Raises JobCancelledError if *job_is_cancelled()* reports a
        cancellation after either UDF run.
        """
        stddev_udf = StdDevUDF()

        roi = self.get_sd_roi()

        result_iter = UDFRunner(stddev_udf).run_for_dataset_async(
            self.dataset, executor, roi=roi, cancel_id=cancel_id)
        # Drain the async iterator; only the final (complete) result is kept.
        async for sd_udf_results in result_iter:
            pass

        if job_is_cancelled():
            raise JobCancelledError()

        sd_udf_results = consolidate_result(sd_udf_results)

        # Ring (annulus) parameters used to restrict the peak search.
        center = (self.parameters["cy"], self.parameters["cx"])
        rad_in = self.parameters["ri"]
        rad_out = self.parameters["ro"]
        n_peaks = self.parameters["n_peaks"]
        min_dist = self.parameters["min_dist"]
        sstd = sd_udf_results['std']
        sshape = sstd.shape
        # NOTE(review): `center` is always a tuple here, so the
        # `center is None` branch can never trigger — verify intent.
        if not (center is None or rad_in is None or rad_out is None):
            # Ring mask = outer disc minus inner disc; `1 *` converts the
            # boolean masks to integers so they can be subtracted.
            mask_out = 1 * _make_circular_mask(center[1], center[0], sshape[1],
                                               sshape[0], rad_out)
            mask_in = 1 * _make_circular_mask(center[1], center[0], sshape[1],
                                              sshape[0], rad_in)
            mask = mask_out - mask_in
            masked_sstd = sstd * mask
        else:
            masked_sstd = sstd

        # Up to n_peaks local maxima, at least min_dist pixels apart.
        coordinates = peak_local_max(masked_sstd,
                                     num_peaks=n_peaks,
                                     min_distance=min_dist)

        y = coordinates[..., 0]
        x = coordinates[..., 1]
        z = range(len(y))

        # One sparse single-pixel mask per detected peak, stacked along
        # a new leading axis of length len(y).
        mask = sparse.COO(shape=(len(y), ) + tuple(self.dataset.shape.sig),
                          coords=(z, y, x),
                          data=1)

        udf = ApplyMasksUDF(mask_factories=lambda: mask,
                            mask_count=len(y),
                            mask_dtype=np.uint8,
                            use_sparse=True)

        result_iter = UDFRunner(udf).run_for_dataset_async(self.dataset,
                                                           executor,
                                                           cancel_id=cancel_id)
        async for udf_results in result_iter:
            pass

        if job_is_cancelled():
            raise JobCancelledError()

        # Post-process in a worker thread (off the event loop), then
        # push the finished results to the client.
        results = await run_blocking(
            self.get_udf_results,
            udf_results=udf_results,
            roi=roi,
        )
        await send_results(results, True)