def test_pick_analysis(hdf5_ds_2, tmpdir_factory, lt_ctx):
    """Generated PICK_FRAME notebook result matches a direct pick analysis run.

    Builds a notebook from the template, executes it (which saves
    ``pick_result.npy``), and compares against running the equivalent
    pick analysis in-process via ``lt_ctx``.
    """
    datadir = tmpdir_factory.mktemp('template_tests')
    conn = {'connection': {'type': 'local'}}
    path = hdf5_ds_2.path
    dataset = _get_hdf5_params(path)
    analysis = [{
        "analysisType": 'PICK_FRAME',
        "parameters": {
            'x': 4,
            'y': 4,
        }
    }]
    notebook = notebook_generator(conn, dataset, analysis, save=True)
    notebook = io.StringIO(notebook.getvalue())
    nb = nbformat.read(notebook, as_version=4)
    ep = ExecutePreprocessor(timeout=600)
    # Execute the notebook in datadir; its return value is not needed
    # (previously bound to an unused `out` variable).
    ep.preprocess(nb, {"metadata": {"path": datadir}})
    data_path = os.path.join(datadir, 'pick_result.npy')
    results = np.load(data_path)
    analysis = lt_ctx.create_pick_analysis(
        dataset=hdf5_ds_2,
        x=4,
        y=4,
    )
    expected = lt_ctx.run(analysis)
    assert np.allclose(
        results,
        expected['intensity'].raw_data,
    )
def test_ring_tcp_cluster(lt_ctx, random_hdf5_1):
    """APPLY_RING_MASK notebook executed against a TCP scheduler matches a direct run.

    Uses a fixed ``tcp://scheduler:8786`` address, so this test expects a
    cluster to be reachable under that name. The notebook writes
    ``ring_result.npy`` into the dataset's directory.
    """
    conn = {"connection": {"type": "TCP", "address": "tcp://scheduler:8786"}}
    ds = random_hdf5_1
    ds_path = ds.path
    tmp_dir = os.path.dirname(ds_path)
    dataset = _get_hdf5_params(ds_path)
    analysis = [{
        "analysisType": "APPLY_RING_MASK",
        "parameters": {
            'shape': 'ring',
            'cx': 8,
            'cy': 8,
            'ri': 5,
            'ro': 8,
        }
    }]
    notebook = notebook_generator(conn, dataset, analysis, save=True)
    notebook = io.StringIO(notebook.getvalue())
    nb = nbformat.read(notebook, as_version=4)
    ep = ExecutePreprocessor(timeout=600)
    # Return value is not needed (previously bound to an unused `out`).
    ep.preprocess(nb, {"metadata": {"path": tmp_dir}})
    data_path = os.path.join(tmp_dir, 'ring_result.npy')
    results = np.load(data_path)
    analysis = lt_ctx.create_ring_analysis(dataset=ds, cx=8, cy=8, ri=5, ro=8)
    expected = lt_ctx.run(analysis)
    assert np.allclose(
        results,
        expected['intensity'].raw_data,
    )
def test_fem_analysis(hdf5_ds_2, tmpdir_factory, lt_ctx, local_cluster_url):
    """FEM notebook template output agrees with running FEMUDF in-process."""
    work_dir = tmpdir_factory.mktemp('template_tests')
    conn = {'connection': {'type': 'tcp', 'address': local_cluster_url}}
    ds_params = _get_hdf5_params(hdf5_ds_2.path)
    params = {
        'shape': 'ring',
        'cx': 1,
        'cy': 1,
        'ri': 0,
        'ro': 1,
    }
    analyses = [{"analysisType": 'FEM', "parameters": params}]
    generated = notebook_generator(conn, ds_params, analyses, save=True)
    nb = nbformat.read(io.StringIO(generated.getvalue()), as_version=4)
    # Executing the notebook writes 'fem_result.npy' into work_dir.
    ExecutePreprocessor(timeout=600).preprocess(nb, {"metadata": {"path": work_dir}})
    results = np.load(os.path.join(work_dir, 'fem_result.npy'))
    udf = FEMUDF(center=(1, 1), rad_in=0, rad_out=1)
    expected = lt_ctx.run_udf(dataset=hdf5_ds_2, udf=udf)
    assert np.allclose(
        results,
        expected['intensity'],
    )
def test_pick_fft_analysis(hdf5_ds_2, tmpdir_factory, lt_ctx, local_cluster_url):
    """PICK_FFT_FRAME notebook output matches PickFFTFrameAnalysis run directly."""
    work_dir = tmpdir_factory.mktemp('template_tests')
    conn = {'connection': {'type': 'tcp', 'address': local_cluster_url}}
    ds_params = _get_hdf5_params(hdf5_ds_2.path)
    params = {
        'x': 4,
        'y': 4,
        'real_rad': 4,
        'real_centerx': 8,
        'real_centery': 8,
    }
    analyses = [{
        "analysisType": 'PICK_FFT_FRAME',
        "parameters": params,
    }]
    generated = notebook_generator(conn, ds_params, analyses, save=True)
    nb = nbformat.read(io.StringIO(generated.getvalue()), as_version=4)
    # Executing the notebook writes 'pickfft_result.npy' into work_dir.
    ExecutePreprocessor(timeout=600).preprocess(nb, {"metadata": {"path": work_dir}})
    results = np.load(os.path.join(work_dir, 'pickfft_result.npy'))
    in_process = PickFFTFrameAnalysis(dataset=hdf5_ds_2, parameters=params)
    expected = lt_ctx.run(in_process)
    assert np.allclose(
        results,
        expected['intensity'].raw_data,
    )
def test_fft_analysis(hdf5_ds_2, tmpdir_factory, lt_ctx):
    """APPLY_FFT_MASK notebook result matches running ApplyFFTMask directly."""
    datadir = tmpdir_factory.mktemp('template_tests')
    conn = {'connection': {'type': 'local'}}
    path = hdf5_ds_2.path
    dataset = _get_hdf5_params(path)
    params = {
        'rad_in': 4,
        'rad_out': 8,
        'real_rad': 4,
        'real_centerx': 8,
        'real_centery': 8
    }
    analysis = [{
        "analysisType": 'APPLY_FFT_MASK',
        "parameters": params,
    }]
    notebook = notebook_generator(conn, dataset, analysis, save=True)
    notebook = io.StringIO(notebook.getvalue())
    nb = nbformat.read(notebook, as_version=4)
    ep = ExecutePreprocessor(timeout=600)
    # Return value is not needed (previously bound to an unused `out`).
    ep.preprocess(nb, {"metadata": {"path": datadir}})
    data_path = os.path.join(datadir, 'fft_result.npy')
    results = np.load(data_path)
    analysis = ApplyFFTMask(dataset=hdf5_ds_2, parameters=params)
    expected = lt_ctx.run(analysis)
    assert np.allclose(
        results,
        expected['intensity'].raw_data,
    )
def test_sd_default(hdf5_ds_2, tmpdir_factory, lt_ctx, local_cluster_url):
    """SD_FRAMES notebook (empty ROI = all frames) matches StdDevUDF 'varsum'."""
    work_dir = tmpdir_factory.mktemp('template_tests')
    conn = {'connection': {'type': 'tcp', 'address': local_cluster_url}}
    ds_params = _get_hdf5_params(hdf5_ds_2.path)
    params = {"roi": {}}
    analyses = [{
        "analysisType": 'SD_FRAMES',
        "parameters": params,
    }]
    generated = notebook_generator(conn, ds_params, analyses, save=True)
    nb = nbformat.read(io.StringIO(generated.getvalue()), as_version=4)
    # Executing the notebook writes 'sd_result.npy' into work_dir.
    ExecutePreprocessor(timeout=600).preprocess(nb, {"metadata": {"path": work_dir}})
    results = np.load(os.path.join(work_dir, 'sd_result.npy'))
    expected = lt_ctx.run_udf(dataset=hdf5_ds_2, udf=StdDevUDF())
    assert np.allclose(
        results,
        expected['varsum'],
    )
def test_disk_default(hdf5_ds_2, tmpdir_factory, lt_ctx):
    """APPLY_DISK_MASK notebook result matches a direct disk analysis run."""
    datadir = tmpdir_factory.mktemp('template_tests')
    conn = {'connection': {'type': 'local'}}
    path = hdf5_ds_2.path
    dataset = _get_hdf5_params(path)
    analysis = [{
        "analysisType": 'APPLY_DISK_MASK',
        "parameters": {
            'shape': 'disk',
            'cx': 8,
            'cy': 8,
            'r': 5,
        }
    }]
    notebook = notebook_generator(conn, dataset, analysis, save=True)
    notebook = io.StringIO(notebook.getvalue())
    nb = nbformat.read(notebook, as_version=4)
    ep = ExecutePreprocessor(timeout=600)
    # Return value is not needed (previously bound to an unused `out`).
    ep.preprocess(nb, {"metadata": {"path": datadir}})
    data_path = os.path.join(datadir, 'disk_result.npy')
    results = np.load(data_path)
    disk_analysis = lt_ctx.create_disk_analysis(dataset=hdf5_ds_2, cx=8, cy=8, r=5)
    expected = lt_ctx.run(disk_analysis)
    assert np.allclose(
        results,
        expected['intensity'].raw_data,
    )
def test_point_default(hdf5_ds_2, tmpdir_factory, lt_ctx, local_cluster_url):
    """APPLY_POINT_SELECTOR notebook output matches a direct point analysis."""
    work_dir = tmpdir_factory.mktemp('template_tests')
    conn = {'connection': {'type': 'tcp', 'address': local_cluster_url}}
    ds_params = _get_hdf5_params(hdf5_ds_2.path)
    analyses = [{
        "analysisType": "APPLY_POINT_SELECTOR",
        "parameters": {
            'shape': 'point',
            'cx': 8,
            'cy': 8,
        }
    }]
    generated = notebook_generator(conn, ds_params, analyses, save=True)
    nb = nbformat.read(io.StringIO(generated.getvalue()), as_version=4)
    # Executing the notebook writes 'point_result.npy' into work_dir.
    ExecutePreprocessor(timeout=600).preprocess(nb, {"metadata": {"path": work_dir}})
    results = np.load(os.path.join(work_dir, 'point_result.npy'))
    in_process = lt_ctx.create_point_analysis(dataset=hdf5_ds_2, x=8, y=8)
    expected = lt_ctx.run(in_process)
    assert np.allclose(
        results,
        expected['intensity'].raw_data,
    )
async def test_clust_default(hdf5_ds_2, tmpdir_factory, inline_executor):
    """CLUST notebook output matches ClusterAnalysis driven through its controller.

    The controller variant is used because cluster analysis has no simple
    ``lt_ctx.run`` path here; results are collected via a ResultContainer
    passed as the ``send_results`` callback.
    """
    datadir = tmpdir_factory.mktemp('template_tests')
    conn = {'connection': {'type': 'local'}}
    path = hdf5_ds_2.path
    dataset = _get_hdf5_params(path)
    # Single source of truth for the parameters; previously this dict was
    # duplicated verbatim for the notebook and for ClusterAnalysis.
    params = {
        'n_peaks': 42,
        'n_clust': 7,
        'cy': 3,
        'cx': 3,
        'ri': 1,
        'ro': 5,
        'delta': 0.05,
        'min_dist': 1,
    }
    analysis = [{
        "analysisType": 'CLUST',
        "parameters": params,
    }]
    notebook = notebook_generator(conn, dataset, analysis, save=True)
    notebook = io.StringIO(notebook.getvalue())
    nb = nbformat.read(notebook, as_version=4)
    ep = ExecutePreprocessor(timeout=600)
    # Return value is not needed (previously bound to an unused `out`).
    ep.preprocess(nb, {"metadata": {"path": datadir}})
    data_path = os.path.join(datadir, 'cluster_result.npy')
    results = np.load(data_path)
    executor = AsyncAdapter(wrapped=inline_executor)
    # Pass a copy so the analysis cannot mutate the dict shared with the
    # notebook template above.
    analysis = ClusterAnalysis(dataset=hdf5_ds_2, parameters=dict(params))
    uuid = 'bd3b39fb-0b34-4a45-9955-339da6501bbb'
    res_container = ResultContainer()
    # NOTE: a dead `async def send_results(...)` placeholder was removed here;
    # the ResultContainer below is what actually receives the results.
    await analysis.controller(
        cancel_id=uuid,
        executor=executor,
        job_is_cancelled=lambda: False,
        send_results=res_container,
    )
    expected = res_container.results
    assert np.allclose(results, expected['intensity'].raw_data)
def test_com_default(hdf5_ds_2, tmpdir_factory, lt_ctx):
    """CENTER_OF_MASS notebook channels all match a direct COM analysis run."""
    datadir = tmpdir_factory.mktemp('template_tests')
    conn = {'connection': {'type': 'local'}}
    path = hdf5_ds_2.path
    dataset = _get_hdf5_params(path)
    analysis = [{
        "analysisType": 'CENTER_OF_MASS',
        "parameters": {
            'shape': 'com',
            'cx': 0,
            'cy': 0,
            'r': 8,
        }
    }]
    notebook = notebook_generator(conn, dataset, analysis, save=True)
    notebook = io.StringIO(notebook.getvalue())
    nb = nbformat.read(notebook, as_version=4)
    ep = ExecutePreprocessor(timeout=600)
    # Return value is not needed (previously bound to an unused `out`).
    ep.preprocess(nb, {"metadata": {"path": datadir}})
    channels = [
        "field",
        "magnitude",
        "divergence",
        "curl",
        "x",
        "y"
    ]
    results = {}
    for channel in channels:
        data_path = os.path.join(datadir, f"com_result_{channel}.npy")
        results[channel] = np.load(data_path)
    com_analysis = lt_ctx.create_com_analysis(
        dataset=hdf5_ds_2, cx=0, cy=0, mask_radius=8
    )
    expected = lt_ctx.run(com_analysis)
    # One assertion per channel; replaces six copy-pasted asserts over the
    # same channel names.
    for channel in channels:
        assert np.allclose(results[channel], expected[channel].raw_data), channel
def test_radial_fourier_default(hdf5_ds_2, tmpdir_factory, lt_ctx, local_cluster_url):
    """RADIAL_FOURIER notebook channels match a direct radial Fourier analysis."""
    work_dir = tmpdir_factory.mktemp('template_tests')
    conn = {'connection': {'type': 'tcp', 'address': local_cluster_url}}
    ds_params = _get_hdf5_params(hdf5_ds_2.path)
    analyses = [{
        "analysisType": "RADIAL_FOURIER",
        "parameters": {
            'shape': 'radial_fourier',
            'cx': 0,
            'cy': 0,
            'ri': 0,
            'ro': 2,
            'n_bins': 2,
            'max_order': 7,
        }
    }]
    generated = notebook_generator(conn, ds_params, analyses, save=True)
    nb = nbformat.read(io.StringIO(generated.getvalue()), as_version=4)
    # Executing the notebook writes one .npy file per channel into work_dir.
    ExecutePreprocessor(timeout=600).preprocess(nb, {"metadata": {"path": work_dir}})
    channels = ["absolute_0_0", "absolute_0_1"]
    results = {
        channel: np.load(os.path.join(work_dir, f"radial_result_{channel}.npy"))
        for channel in channels
    }
    in_process = lt_ctx.create_radial_fourier_analysis(dataset=hdf5_ds_2, cx=0, cy=0,
                                                       ri=0, ro=2, n_bins=2, max_order=7)
    expected = lt_ctx.run(in_process)
    assert np.allclose(results["absolute_0_0"], expected["absolute_0_0"].raw_data)
    assert np.allclose(results["absolute_0_1"], expected["absolute_0_1"].raw_data)
def test_sum_default(hdf5_ds_2, tmpdir_factory):
    """SUM_FRAMES notebook result equals summing the raw HDF5 data over nav axes."""
    datadir = tmpdir_factory.mktemp('template_tests')
    conn = {'connection': {'type': 'local'}}
    path = hdf5_ds_2.path
    dataset = _get_hdf5_params(path)
    analysis = [{"analysisType": "SUM_FRAMES", "parameters": {"roi": {}}}]
    notebook = notebook_generator(conn, dataset, analysis, save=True)
    notebook = io.StringIO(notebook.getvalue())
    nb = nbformat.read(notebook, as_version=4)
    ep = ExecutePreprocessor(timeout=600)
    # Return value is not needed (previously bound to an unused `out`).
    ep.preprocess(nb, {"metadata": {"path": datadir}})
    data_path = os.path.join(datadir, 'sum_result.npy')
    result = np.load(data_path)
    # Compute the reference sum directly from the underlying HDF5 dataset:
    # axes (0, 1) are the navigation dimensions.
    with hdf5_ds_2.get_reader().get_h5ds() as h5ds:
        data = h5ds[:]
    expected = data.sum(axis=(0, 1))
    assert np.allclose(expected, result)
def test_sd_roi(hdf5_ds_2, tmpdir_factory, lt_ctx):
    """SD_FRAMES notebook with a rectangular ROI matches StdDevUDF over that ROI."""
    datadir = tmpdir_factory.mktemp('template_tests')
    conn = {'connection': {'type': 'local'}}
    path = hdf5_ds_2.path
    dataset = _get_hdf5_params(path)
    roi_params = {
        "shape": "rect",
        "x": 1,
        "y": 2,
        "width": 6,
        "height": 6
    }
    analysis = [{
        "analysisType": 'SD_FRAMES',
        "parameters": {
            "roi": roi_params
        }
    }]
    notebook = notebook_generator(conn, dataset, analysis, save=True)
    notebook = io.StringIO(notebook.getvalue())
    nb = nbformat.read(notebook, as_version=4)
    ep = ExecutePreprocessor(timeout=600)
    # Return value is not needed (previously bound to an unused `out`).
    ep.preprocess(nb, {"metadata": {"path": datadir}})
    data_path = os.path.join(datadir, 'sd_result.npy')
    results = np.load(data_path)
    # NOTE(review): assumes shape.nav unpacks as (nx, ny) here; the dataset is
    # square so a swap would not be caught — confirm against Shape docs.
    nx, ny = hdf5_ds_2.shape.nav
    roi = masks.rectangular(
        X=roi_params["x"],
        Y=roi_params["y"],
        Width=roi_params["width"],
        Height=roi_params["height"],
        imageSizeX=nx,
        imageSizeY=ny)
    udf = StdDevUDF()
    expected = lt_ctx.run_udf(dataset=hdf5_ds_2, udf=udf, roi=roi)
    assert np.allclose(
        results,
        expected['varsum'].raw_data,
    )
def test_sum_roi(hdf5_ds_2, tmpdir_factory, lt_ctx, local_cluster_url):
    """SUM_FRAMES notebook with a disk ROI matches SumUDF over the same ROI."""
    work_dir = tmpdir_factory.mktemp('template_tests')
    conn = {'connection': {'type': 'tcp', 'address': local_cluster_url}}
    ds_params = _get_hdf5_params(hdf5_ds_2.path)
    roi_params = {
        "shape": "disk",
        "cx": 8,
        "cy": 8,
        "r": 6
    }
    analyses = [{
        "analysisType": "SUM_FRAMES",
        "parameters": {
            "roi": roi_params
        }
    }]
    generated = notebook_generator(conn, ds_params, analyses, save=True)
    nb = nbformat.read(io.StringIO(generated.getvalue()), as_version=4)
    # Executing the notebook writes 'sum_result.npy' into work_dir.
    ExecutePreprocessor(timeout=600).preprocess(nb, {"metadata": {"path": work_dir}})
    results = np.load(os.path.join(work_dir, 'sum_result.npy'))
    nx, ny = hdf5_ds_2.shape.nav
    # Build the same disk ROI locally to compute the reference result.
    roi = masks.circular(
        centerX=roi_params['cx'],
        centerY=roi_params['cy'],
        imageSizeX=nx,
        imageSizeY=ny,
        radius=roi_params['r'],
    )
    udf = SumUDF()
    expected = lt_ctx.run_udf(hdf5_ds_2, udf, roi)
    assert np.allclose(
        results,
        expected['intensity'].raw_data,
    )