async def get_preview_image(self, dataset_uuid):
    """Compute a sum-over-all-frames preview for the dataset identified by
    `dataset_uuid` and return the encoded image (PNG) contents.

    Submits one dask future per job task, merges completed tiles into a
    single result array, then encodes it off the event loop thread.
    """
    ds = self.data.get_dataset(dataset_uuid)
    executor = self.data.get_executor()
    job = SumFramesJob(dataset=ds)
    log.info("creating preview for dataset %s" % dataset_uuid)
    # one future per task; no extra submit kwargs needed
    futures = [
        executor.client.submit(task)
        for task in job.get_tasks()
    ]
    log.info("preview futures created")
    full_result = np.zeros(shape=ds.shape[2:])
    # merge partial results as soon as each future finishes
    async for future, result in dd.as_completed(futures, with_results=True):
        for tile in result:
            tile.copy_to_result(full_result)
    log.info("preview done, encoding image (dtype=%s)", full_result.dtype)
    # encoding is CPU-bound; run it in a worker thread to keep the loop free
    image = await run_blocking(
        _encode_image,
        full_result,
        colormap=cm.gist_earth,
        save_kwargs={'format': 'png'},
    )
    log.info("image encoded, sending response")
    return image.read()
async def async_main(ds_kwargs, address):
    """Live sum-frames + interactive mask workflow displayed in Digital Micrograph.

    Connects to a dask scheduler at *address*, loads the dataset described by
    *ds_kwargs*, shows a running sum image in a new DM document, then
    repeatedly applies a rectangle mask (taken from an interactive ROI
    component) to the dataset and live-updates a second result image.
    """
    executor = await AsyncDaskJobExecutor.connect(address)
    ds = load(**ds_kwargs)
    sum_job = SumFramesJob(dataset=ds)
    (y, x) = sum_job.get_result_shape()
    sum_image = get_result_image(sum_job)
    # numpy view into DM's image buffer — partial results are written here
    sum_buffer = sum_image.GetNumArray()
    doc = DM.NewImageDocument("test document")
    d = doc.AddImageDisplay(sum_image, 1)
    # Add an ROI component covering the central 20% of the image
    # (component type 5 — presumably a rectangle; TODO confirm in DM docs)
    c = d.AddNewComponent(5, int(y * 0.4), int(x * 0.4), int(y * 0.6), int(x * 0.6))
    c.SetForegroundColor(1, 0, 0)
    doc.Show()
    # Refresh the DM display after every partial sum result
    async for _ in run(executor, sum_job, sum_buffer):
        sum_image.UpdateImage()
    rect = c.GetRect()
    mask = mask_factory_from_rect(rect, tuple(ds.shape.sig))
    rect_job = ApplyMasksJob(dataset=ds, mask_factories=[mask])
    result_buffer = np.zeros(rect_job.get_result_shape())
    result_image = DM.CreateImage(result_buffer[0])
    result_image.ShowImage()
    result_image_buffer = result_image.GetNumArray()
    # For now we do a limited number of runs
    # FIXME implement a proper way to exit the loop
    counter = 0
    while counter < 20:
        counter += 1
        # reset accumulator before re-running the mask job
        result_buffer[:] = 0
        async for _ in run(executor, rect_job, result_buffer):
            np.copyto(
                result_image_buffer,
                # The reshape is a workaround for a bug in the current alpha version of DM
                # This will not be required in the final DM release
                result_buffer[0].reshape(result_image_buffer.shape),
                casting='unsafe')
            result_image.UpdateImage()
        # Poll once per second until the user moves the ROI; then rebuild
        # the mask job for the new rectangle and loop to run it.
        while True:
            newrect = c.GetRect()
            if newrect != rect:
                rect = newrect
                mask = mask_factory_from_rect(rect, tuple(ds.shape.sig))
                rect_job = ApplyMasksJob(dataset=ds, mask_factories=[mask])
                break
            await asyncio.sleep(1)
async def test_run_job(aexecutor):
    """Summing tiles produced by SumFramesJob matches a direct numpy sum."""
    raw = _mk_random(size=(16, 16, 16, 16), dtype='<u2')
    ds = MemoryDataSet(data=raw, tileshape=(1, 16, 16), num_partitions=2)
    reference = raw.sum(axis=(0, 1))
    job = SumFramesJob(dataset=ds)
    result = job.get_result_buffer()
    async for tile_batch in aexecutor.run_job(job):
        for t in tile_batch:
            t.reduce_into_result(result)
    assert result.shape == (16, 16)
    assert np.allclose(result, reference)
def run_blobfinder(ctx, dataset, parameters):
    """Two-pass blob finding: locate peaks on the log-scaled frame sum,
    then refine them with a second UDF pass over the dataset.

    Returns a tuple: (sum_result, centers, refineds, peak_values,
    peak_elevations, peaks).
    """
    raw_sum = ctx.run(SumFramesJob(dataset=dataset))
    # shift to strictly positive values before taking the log
    sum_result = np.log(raw_sum - np.min(raw_sum) + 1)
    peaks = get_peaks(
        parameters=parameters,
        framesize=tuple(dataset.shape.sig),
        sum_result=sum_result,
    )
    init_fn = functools.partial(init_pass_2, peaks=peaks, parameters=parameters)
    buffers_fn = functools.partial(
        get_result_buffers_pass_2,
        num_disks=parameters['num_disks'],
    )
    pass_2_results = ctx.run_udf(
        dataset=dataset,
        fn=pass_2,
        init=init_fn,
        make_buffers=buffers_fn,
    )
    return (
        sum_result,
        pass_2_results['centers'],
        pass_2_results['refineds'],
        pass_2_results['peak_values'],
        pass_2_results['peak_elevations'],
        peaks,
    )
async def async_main(address):
    """Connect to a dask scheduler, sum all frames of a .blo dataset,
    and print partial and final result sums.

    The dataset path may be given as the first command line argument;
    otherwise a hard-coded default path is used.
    """
    # start background task: (can be replaced with asyncio.create_task(coro) in Python 3.7)
    background_events = asyncio.ensure_future(background_task())

    executor = await AsyncDaskJobExecutor.connect(address)

    default_path = (
        "C:/Users/weber/Nextcloud/Projects/Open Pixelated STEM framework/"
        "Data/3rd-Party Datasets/Glasgow/10 um 110.blo"
    )
    path = sys.argv[1] if len(sys.argv) > 1 else default_path

    ds = load("blo", path=path, tileshape=(1, 8, 144, 144))
    ds.initialize()

    job = SumFramesJob(dataset=ds)
    out = get_result_buffer(job)
    async for part_result in run(executor, job, out):
        print("Partial result sum: ", out.sum())
    print("Final result sum: ", out.sum())

    # stop the background task:
    background_events.cancel()
async def get_preview_image(self, dataset_uuid):
    """Compute a sum-over-all-frames preview for the dataset identified by
    `dataset_uuid` and return the encoded image (PNG) contents.

    A dask client is connected for the duration of the computation and is
    closed afterwards.

    Parameters
    ----------
    dataset_uuid
        Key of the dataset in ``self.data``.

    Returns
    -------
    The bytes read from the encoded image buffer.
    """
    ds = self.data.get_dataset(dataset_uuid)
    job = SumFramesJob(dataset=ds)
    dask_client = await AioClient("tcp://localhost:8786")
    executor = DaskJobExecutor(client=dask_client, is_local=True)
    try:
        # one future per job task
        futures = [
            executor.client.submit(task)
            for task in job.get_tasks()
        ]
        full_result = np.zeros(shape=ds.shape[2:])
        # merge partial results as each future completes
        async for future, result in dd.as_completed(futures, with_results=True):
            for tile in result:
                tile.copy_to_result(full_result)
    finally:
        # FIX: the client was previously never closed, leaking one scheduler
        # connection per preview request. Close it once all results are in
        # (or if gathering fails).
        await dask_client.close()
    image = _encode_image(
        full_result,
        colormap=cm.gist_earth,
        save_kwargs={'format': 'png'},
    )
    return image.read()
def run_blobfinder(ctx, dataset, parameters):
    """Find peaks on the log-scaled frame sum, then refine them by
    blob correlation over the dataset.

    Returns a tuple: (sum_result, centers, refineds, peak_values,
    peak_elevations, peaks).
    """
    raw_sum = ctx.run(SumFramesJob(dataset=dataset))
    # shift to strictly positive values before taking the log
    sum_result = np.log(raw_sum - np.min(raw_sum) + 1)
    peaks = get_peaks(
        parameters=parameters,
        sum_result=sum_result,
    )
    refined = run_blobcorrelation(ctx, dataset, peaks, parameters)
    result_keys = ('centers', 'refineds', 'peak_values', 'peak_elevations')
    return (sum_result,) + tuple(refined[k] for k in result_keys) + (peaks,)
async def start_sum_frames_job(self, uuid, params, analysis, ds):
    """Run a sum-over-frames job for *ds*, streaming visualizations.

    Drives the ``self.run_job`` async generator: primes it with
    ``asend(None)``, then loops — rendering the current (partially filled)
    ``full_result`` via ``self.visualize`` and sending the encoded images
    back into the generator — until the generator is exhausted.

    NOTE(review): ``params`` is not referenced in this body; presumably kept
    for interface symmetry with sibling job starters — confirm.
    """
    job = SumFramesJob(dataset=ds)
    # accumulator sized to the first two dataset dimensions
    # (assumes ds.shape[:2] is the navigation/scan grid — TODO confirm)
    full_result = np.zeros(shape=tuple(ds.shape[:2]))
    job_runner = self.run_job(
        full_result=full_result,
        uuid=uuid,
        ds=ds,
        job=job,
    )
    try:
        # prime the async generator up to its first yield
        await job_runner.asend(None)
        while True:
            images = await self.visualize(
                full_result,
                analysis,
                save_kwargs={'format': 'png'},
            )
            # hand rendered images back; raises StopAsyncIteration once
            # the job has produced all partial results
            await job_runner.asend(images)
    except StopAsyncIteration:
        pass
async def async_main(address):
    """Live sum-frames + interactive mask workflow displayed in Digital
    Micrograph, driven by a dask cluster at *address*.

    Shows a running sum image, adds an interactive ROI rectangle, then
    repeatedly applies a mask built from that rectangle to the dataset,
    live-updating a second result image until a fixed number of runs.
    """
    GUI_events = asyncio.ensure_future(background_task())
    executor = await AsyncDaskJobExecutor.connect(address)
    # Just an alternative dataset that works better on a slower machine
    # ds = load(
    #     "blo",
    #     path=("C:/Users/weber/Nextcloud/Projects/Open Pixelated STEM framework/"
    #           "Data/3rd-Party Datasets/Glasgow/10 um 110.blo"),
    #     tileshape=(1,8,144,144)
    # )
    # For a remote cluster this has to be the path on the worker nodes, not the client
    ds = load(
        "raw",
        path='/data/users/weber/scan_11_x256_y256.raw',
        dtype="float32",
        scan_size=(256, 256),
        detector_size_raw=(130, 128),
        crop_detector_to=(128, 128),
    )
    sum_job = SumFramesJob(dataset=ds)
    (y, x) = sum_job.get_result_shape()
    sum_image = get_result_image(sum_job)
    # numpy view into DM's image buffer — partial results are written here
    sum_buffer = sum_image.GetNumArray()
    doc = DM.NewImageDocument("test document")
    d = doc.AddImageDisplay(sum_image, 1)
    # Add an ROI component covering the central 20% of the image
    # (component type 5 — presumably a rectangle; TODO confirm in DM docs)
    c = d.AddNewComponent(5, int(y * 0.4), int(x * 0.4), int(y * 0.6), int(x * 0.6))
    c.SetForegroundColor(1, 0, 0)
    doc.Show()
    # Refresh the DM display after every partial sum result
    async for _ in run(executor, sum_job, sum_buffer):
        sum_image.UpdateImage()
    rect = c.GetRect()
    mask = mask_factory_from_rect(rect, tuple(ds.shape.sig))
    rect_job = ApplyMasksJob(dataset=ds, mask_factories=[mask])
    result_buffer = np.zeros(rect_job.get_result_shape())
    result_image = DM.CreateImage(result_buffer[0])
    result_image.ShowImage()
    result_image_buffer = result_image.GetNumArray()
    # For now we do a limited number of runs
    # FIXME implement a proper way to exit the loop
    counter = 0
    while counter < 20:
        counter += 1
        # reset accumulator before re-running the mask job
        result_buffer[:] = 0
        async for _ in run(executor, rect_job, result_buffer):
            np.copyto(
                result_image_buffer,
                # The reshape is a workaround for a bug in the current alpha version of DM
                # This will not be required in the final DM release
                result_buffer[0].reshape(result_image_buffer.shape),
                casting='unsafe')
            result_image.UpdateImage()
        # Poll once per second until the user moves the ROI; then rebuild
        # the mask job for the new rectangle and loop to run it.
        while True:
            newrect = c.GetRect()
            if newrect != rect:
                rect = newrect
                mask = mask_factory_from_rect(rect, tuple(ds.shape.sig))
                rect_job = ApplyMasksJob(dataset=ds,
                                         mask_factories=[mask])
                break
            await asyncio.sleep(1)
    # stop the background GUI event task
    GUI_events.cancel()
def get_job(self):
    """Return a SumFramesJob configured for this object's dataset."""
    job = SumFramesJob(dataset=self.dataset)
    return job