def test_sd_default(hdf5_ds_2, tmpdir_factory, lt_ctx, local_cluster_url):
    datadir = tmpdir_factory.mktemp('template_tests')
    conn = {'connection': {'type': 'tcp', 'address': local_cluster_url}}
    path = hdf5_ds_2.path
    dataset = _get_hdf5_params(path)
    params = {"roi": {}}
    analysis = [{
        "analysisType": 'SD_FRAMES',
        "parameters": params,
    }]
    notebook = notebook_generator(conn, dataset, analysis, save=True)
    notebook = io.StringIO(notebook.getvalue())
    nb = nbformat.read(notebook, as_version=4)
    ep = ExecutePreprocessor(timeout=600)
    ep.preprocess(nb, {"metadata": {"path": datadir}})
    data_path = os.path.join(datadir, 'sd_result.npy')
    results = np.load(data_path)
    udf = StdDevUDF()
    expected = lt_ctx.run_udf(dataset=hdf5_ds_2, udf=udf)
    assert np.allclose(
        results,
        expected['varsum'],
    )

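# _get_hdf5_params is used by the tests here but not defined in this snippet.
# A minimal sketch of what it plausibly returns, assuming notebook_generator
# expects a dataset descriptor with a type and load parameters; the exact keys
# ('type', 'params', 'ds_path') are assumptions, not the actual implementation:
def _get_hdf5_params(path):
    return {
        "type": "HDF5",
        "params": {
            "path": path,
            "ds_path": "data",
        },
    }
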
def test_concurrent_executor(lt_ctx, concurrent_ctx, default_raw, use_roi):
    if use_roi:
        roi = np.random.choice([True, False], default_raw.shape.nav)
    else:
        roi = None
    mask = np.random.random(default_raw.shape.sig)

    def mask_factory():
        return mask

    load_params = {
        'filetype': 'raw',
        'path': default_raw._path,
        'nav_shape': default_raw.shape.nav,
        'sig_shape': default_raw.shape.sig,
        'dtype': default_raw.dtype,
    }
    udfs = [StdDevUDF(), ApplyMasksUDF(mask_factories=[mask_factory])]
    ref_res = lt_ctx.run_udf(dataset=default_raw, udf=udfs, roi=roi)
    ds = concurrent_ctx.load(**load_params)
    res = concurrent_ctx.run_udf(dataset=ds, udf=udfs, roi=roi)
    assert len(ref_res) == len(res)
    # every buffer must match the reference executor, and both result
    # dicts must contain the same keys
    for index, value in enumerate(ref_res):
        for key, ref in value.items():
            assert np.allclose(ref.data, res[index][key].data, equal_nan=True)
        for key in res[index].keys():
            assert key in value

def _make_udfs(ds):
    def factory():
        m = np.zeros(ds.shape.sig)
        m[-1, -1] = 1.3
        return m

    udfs = [StdDevUDF(), ApplyMasksUDF(mask_factories=[factory])]
    return udfs

def test_sd_roi(hdf5_ds_2, tmpdir_factory, lt_ctx):
    datadir = tmpdir_factory.mktemp('template_tests')
    conn = {'connection': {'type': 'local'}}
    path = hdf5_ds_2.path
    dataset = _get_hdf5_params(path)
    roi_params = {
        "shape": "rect",
        "x": 1,
        "y": 2,
        "width": 6,
        "height": 6,
    }
    analysis = [{
        "analysisType": 'SD_FRAMES',
        "parameters": {
            "roi": roi_params,
        },
    }]
    notebook = notebook_generator(conn, dataset, analysis, save=True)
    notebook = io.StringIO(notebook.getvalue())
    nb = nbformat.read(notebook, as_version=4)
    ep = ExecutePreprocessor(timeout=600)
    ep.preprocess(nb, {"metadata": {"path": datadir}})
    data_path = os.path.join(datadir, 'sd_result.npy')
    results = np.load(data_path)
    nx, ny = hdf5_ds_2.shape.nav
    # rebuild the same rectangular ROI the generated notebook used,
    # then compare against a direct StdDevUDF run
    roi = masks.rectangular(
        X=roi_params["x"],
        Y=roi_params["y"],
        Width=roi_params["width"],
        Height=roi_params["height"],
        imageSizeX=nx,
        imageSizeY=ny,
    )
    udf = StdDevUDF()
    expected = lt_ctx.run_udf(dataset=hdf5_ds_2, udf=udf, roi=roi)
    assert np.allclose(
        results,
        expected['varsum'].raw_data,
    )

def get_sd_results(self, executor, cancel_id, job_is_cancelled):
    stddev_udf = StdDevUDF()
    roi = get_roi(params=self.parameters, shape=self.dataset.shape.nav)
    return self.run_sd_udf(roi, stddev_udf, executor, cancel_id, job_is_cancelled)

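# get_roi is not defined in this snippet. A hypothetical sketch, assuming the
# same rectangular ROI parameter format that test_sd_roi above uses; the real
# helper may support more shapes than 'rect':
def get_roi(params, shape):
    roi_params = params.get("roi", {})
    if roi_params.get("shape") != "rect":
        return None  # no (or unsupported) ROI: process all frames
    roi = np.zeros(tuple(shape), dtype=bool)
    y, x = roi_params["y"], roi_params["x"]
    h, w = roi_params["height"], roi_params["width"]
    roi[y:y + h, x:x + w] = True
    return roi
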
async def controller(self, cancel_id, executor, job_is_cancelled, send_results):
    stddev_udf = StdDevUDF()
    roi = self.get_sd_roi()

    result_iter = UDFRunner(stddev_udf).run_for_dataset_async(
        self.dataset, executor, roi=roi, cancel_id=cancel_id
    )
    # drain the iterator; the final item holds the merged result
    async for sd_udf_results in result_iter:
        pass
    if job_is_cancelled():
        raise JobCancelledError()

    # unwrap the UDF buffers and derive per-pixel var, std and mean
    sd_udf_results = dict(sd_udf_results.items())
    sd_udf_results['var'] = sd_udf_results['var'].data / sd_udf_results['num_frame'].data
    sd_udf_results['std'] = np.sqrt(sd_udf_results['var'])
    sd_udf_results['mean'] = sd_udf_results['sum_frame'].data / sd_udf_results['num_frame'].data
    sd_udf_results['num_frame'] = sd_udf_results['num_frame'].data
    sd_udf_results['sum_frame'] = sd_udf_results['sum_frame'].data

    center = (self.parameters["cy"], self.parameters["cx"])
    rad_in = self.parameters["ri"]
    rad_out = self.parameters["ro"]
    delta = self.parameters["delta"]
    n_peaks = self.parameters["n_peaks"]
    min_dist = self.parameters["min_dist"]
    savg = sd_udf_results['mean']
    sstd = sd_udf_results['std']
    sshape = sstd.shape
    if not (center is None or rad_in is None or rad_out is None):
        # restrict the peak search to an annular region around the center
        mask_out = 1 * _make_circular_mask(
            center[1], center[0], sshape[1], sshape[0], rad_out
        )
        mask_in = 1 * _make_circular_mask(
            center[1], center[0], sshape[1], sshape[0], rad_in
        )
        mask = mask_out - mask_in
        masked_sstd = sstd * mask
    else:
        masked_sstd = sstd

    coordinates = peak_local_max(masked_sstd, num_peaks=n_peaks, min_distance=min_dist)

    udf = feature.FeatureVecMakerUDF(delta=delta, savg=savg, coordinates=coordinates)
    result_iter = UDFRunner(udf).run_for_dataset_async(
        self.dataset, executor, cancel_id=cancel_id
    )
    async for udf_results in result_iter:
        pass
    if job_is_cancelled():
        raise JobCancelledError()

    results = await run_blocking(
        self.get_udf_results,
        udf_results=udf_results,
        roi=roi,
    )
    await send_results(results, True)

async def controller(self, cancel_id, executor, job_is_cancelled, send_results):
    stddev_udf = StdDevUDF()
    roi = self.get_sd_roi()

    result_iter = UDFRunner(stddev_udf).run_for_dataset_async(
        self.dataset, executor, roi=roi, cancel_id=cancel_id
    )
    async for sd_udf_results in result_iter:
        pass
    if job_is_cancelled():
        raise JobCancelledError()

    sd_udf_results = consolidate_result(sd_udf_results)

    center = (self.parameters["cy"], self.parameters["cx"])
    rad_in = self.parameters["ri"]
    rad_out = self.parameters["ro"]
    n_peaks = self.parameters["n_peaks"]
    min_dist = self.parameters["min_dist"]
    sstd = sd_udf_results['std']
    sshape = sstd.shape
    if not (center is None or rad_in is None or rad_out is None):
        mask_out = 1 * _make_circular_mask(
            center[1], center[0], sshape[1], sshape[0], rad_out
        )
        mask_in = 1 * _make_circular_mask(
            center[1], center[0], sshape[1], sshape[0], rad_in
        )
        mask = mask_out - mask_in
        masked_sstd = sstd * mask
    else:
        masked_sstd = sstd

    coordinates = peak_local_max(masked_sstd, num_peaks=n_peaks, min_distance=min_dist)

    # build one sparse one-hot mask per detected peak: mask i is 1 at
    # peak position (y[i], x[i]) and 0 elsewhere
    y = coordinates[..., 0]
    x = coordinates[..., 1]
    z = range(len(y))
    mask = sparse.COO(
        shape=(len(y),) + tuple(self.dataset.shape.sig),
        coords=(z, y, x),
        data=1,
    )

    udf = ApplyMasksUDF(
        mask_factories=lambda: mask,
        mask_count=len(y),
        mask_dtype=np.uint8,
        use_sparse=True,
    )
    result_iter = UDFRunner(udf).run_for_dataset_async(
        self.dataset, executor, cancel_id=cancel_id
    )
    async for udf_results in result_iter:
        pass
    if job_is_cancelled():
        raise JobCancelledError()

    results = await run_blocking(
        self.get_udf_results,
        udf_results=udf_results,
        roi=roi,
    )
    await send_results(results, True)

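# consolidate_result is called above but not defined in this snippet. A minimal
# sketch of what it plausibly does, inferred from the inline post-processing in
# the earlier controller variant (unwrap the UDF buffers and derive var, std
# and mean); treat this as an assumption, not the actual implementation:
def consolidate_result(sd_udf_results):
    results = dict(sd_udf_results.items())
    num_frame = results['num_frame'].data
    results['var'] = results['var'].data / num_frame
    results['std'] = np.sqrt(results['var'])
    results['mean'] = results['sum_frame'].data / num_frame
    results['num_frame'] = num_frame
    results['sum_frame'] = results['sum_frame'].data
    return results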