def test_deconvolve_and_restore_cube_spectral(self):
    self.actualSetUp(add_errors=True)
    dirty_graph = create_invert_graph(self.vis_graph_list, self.model_graph,
                                      context='wstack', vis_slices=51,
                                      dopsf=False, normalize=True)
    psf_graph = create_invert_graph(self.vis_graph_list, self.model_graph,
                                    context='wstack', vis_slices=51,
                                    dopsf=True, normalize=True)
    dec_graph = create_deconvolve_graph(dirty_graph, psf_graph, self.model_graph,
                                        niter=1000, fractional_threshold=0.1,
                                        scales=[0, 3, 10], threshold=0.1,
                                        nmajor=0, gain=0.7)
    residual_graph = create_residual_graph(self.vis_graph_list, model_graph=dec_graph,
                                           context='wstack', vis_slices=51)
    rest_graph = create_restore_graph(dec_graph, psf_graph, residual_graph)
    restored = rest_graph[0].compute()
    export_image_to_fits(restored,
                         '%s/test_imaging_delayed_restored.fits' % self.dir)
def _invert_base(self, context='2d', extra='', flux_max=100.0, flux_min=-0.2,
                 flux_tolerance=5.0):
    dirty_graph = create_invert_graph(self.vis_graph_list, self.model_graph,
                                      context=context, dopsf=False,
                                      normalize=True, **self.params)
    if self.compute:
        dirty = dirty_graph[0].compute()
        export_image_to_fits(dirty[0],
                             '%s/test_imaging_delayed_invert_%s%s_dirty.fits' %
                             (self.dir, context, extra))
        qa = qa_image(dirty[0])
        assert numpy.abs(qa.data['max'] - flux_max) < flux_tolerance, str(qa)
        assert numpy.abs(qa.data['min'] - flux_min) < flux_tolerance, str(qa)
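# Illustrative only: a concrete test might drive the helper above like this.
# This sketch is not part of the original suite; the context name and the
# expected flux values are assumptions, and actualSetUp/self.params are
# presumed to be populated as in the other tests of this class.
def test_invert_wstack(self):
    self.actualSetUp(add_errors=False)
    self._invert_base(context='wstack', extra='', flux_max=100.0, flux_min=-0.2,
                      flux_tolerance=5.0)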
def create_continuum_imaging_pipeline_graph(vis_graph_list, model_graph: delayed,
                                            context='2d', **kwargs) -> delayed:
    """ Create graph for the continuum imaging pipeline.

    Same as ICAL but with no selfcal.

    :param vis_graph_list:
    :param model_graph:
    :param c_deconvolve_graph: Default: create_deconvolve_graph
    :param c_invert_graph: Default: create_invert_graph
    :param c_residual_graph: Default: create_residual_graph
    :param kwargs: Parameters for functions in graphs
    :return:
    """
    psf_graph = create_invert_graph(vis_graph_list, model_graph, dopsf=True,
                                    context=context, **kwargs)

    residual_graph = create_residual_graph(vis_graph_list, model_graph,
                                           context=context, **kwargs)
    deconvolve_model_graph = create_deconvolve_graph(residual_graph, psf_graph,
                                                     model_graph, **kwargs)

    nmajor = get_parameter(kwargs, "nmajor", 5)
    if nmajor > 1:
        for cycle in range(nmajor):
            residual_graph = create_residual_graph(vis_graph_list,
                                                   deconvolve_model_graph,
                                                   context=context, **kwargs)
            deconvolve_model_graph = create_deconvolve_graph(residual_graph, psf_graph,
                                                             deconvolve_model_graph,
                                                             **kwargs)

    residual_graph = create_residual_graph(vis_graph_list, deconvolve_model_graph,
                                           context=context, **kwargs)
    restore_graph = create_restore_graph(deconvolve_model_graph, psf_graph,
                                         residual_graph)
    return delayed((deconvolve_model_graph, residual_graph, restore_graph))
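# A minimal usage sketch for the continuum imaging pipeline graph, assuming
# vis_graph_list and model_graph have already been built as in the surrounding
# code (e.g. via create_simulate_vis_graph and delayed(create_image_from_visibility)).
# The deconvolution parameters and output file name below are illustrative, not defaults.
continuum_imaging_graph = create_continuum_imaging_pipeline_graph(vis_graph_list,
                                                                  model_graph=model_graph,
                                                                  context='wstack',
                                                                  vis_slices=51,
                                                                  niter=1000,
                                                                  fractional_threshold=0.1,
                                                                  scales=[0, 3, 10],
                                                                  threshold=0.1,
                                                                  nmajor=5,
                                                                  gain=0.7)
deconvolved, residual, restored = continuum_imaging_graph.compute()
export_image_to_fits(restored[0], 'continuum_imaging_restored.fits')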
def _invert_base(self, context='2d', extra='', positionthreshold=1.0,
                 check_components=True):
    dirty_graph = create_invert_graph(self.vis_graph_list, self.model_graph,
                                      context=context, dopsf=False,
                                      normalize=True, **self.params)
    if self.compute:
        dirty = dirty_graph[0].compute()
        export_image_to_fits(dirty[0],
                             '%s/test_imaging_graph_invert_%s%s_dirty.fits' %
                             (self.results_dir, context, extra))
        qa = qa_image(dirty[0])
        assert numpy.abs(qa.data['max'] - 100.0) < 5.0, str(qa)
        assert numpy.abs(qa.data['min'] + 2.0) < 5.0, str(qa)
                                      polarisation_frame=PolarisationFrame("stokesI")))
model = future.result()

cmodel = smooth_image(model)
export_image_to_fits(cmodel,
                     '%s/imaging-low-cmodel_npixel_%d.fits' % (results_dir, npixel))
cmodel = None

predicted_vis_graph_list = create_predict_graph(vis_graph_list, model)
predicted_vis_graph_list = create_weight_vis_graph_list(predicted_vis_graph_list, model)
predicted_vis_graph_list = compute_list(c, predicted_vis_graph_list)

# Make the dirty image and point spread function
future = c.compute(create_invert_graph(predicted_vis_graph_list, model))
dirty, sumwt = future.result()
print("Max, min in dirty image = %.6f, %.6f, sumwt = %s" %
      (dirty.data.max(), dirty.data.min(), sumwt))
export_image_to_fits(dirty,
                     '%s/imaging-low-dirty_npixel_%d.fits' % (results_dir, npixel))
dirty = None

future = c.compute(create_invert_graph(predicted_vis_graph_list, model, dopsf=True))
psf, sumwt = future.result()
print("Max, min in PSF = %.6f, %.6f, sumwt = %s" %
      (psf.data.max(), psf.data.min(), sumwt))
export_image_to_fits(
def trial_case(results, seed=180555, context='wstack', nworkers=8, threads_per_worker=1,
               processes=True, order='frequency', nfreqwin=7, ntimes=3, rmax=750.0,
               facets=1, wprojection_planes=1):
    """ Single trial for performance-timings

    Simulates visibilities from GLEAM including phase errors
    Makes dirty image and PSF
    Runs ICAL pipeline

    The results are in a dictionary:

    'context': input - a string describing concisely the purpose of the test
    'time overall', overall execution time (s)
    'time create gleam', time to create GLEAM prediction graph
    'time predict', time to execute GLEAM prediction graph
    'time corrupt', time to corrupt data
    'time invert', time to make dirty image
    'time psf invert', time to make PSF
    'time ICAL graph', time to create ICAL graph
    'time ICAL', time to execute ICAL graph
    'context', type of imaging e.g. 'wstack'
    'nworkers', number of workers to create
    'threads_per_worker',
    'nnodes', Number of nodes
    'processes',
    'order', Ordering of data
    'nfreqwin', Number of frequency windows in simulation
    'ntimes', Number of hour angles in simulation
    'rmax', Maximum radius of stations used in simulation (m)
    'facets', Number of facets in deconvolution and imaging
    'wprojection_planes', Number of wprojection planes
    'vis_slices', Number of visibility slices (per Visibility)
    'npixel', Number of pixels in image
    'cellsize', Cellsize in radians
    'seed', Random number seed
    'dirty_max', Maximum in dirty image
    'dirty_min', Minimum in dirty image
    'psf_max',
    'psf_min',
    'restored_max',
    'restored_min',
    'deconvolved_max',
    'deconvolved_min',
    'residual_max',
    'residual_min',
    'git_hash', GIT hash (not definitive since local mods are possible)

    :param results: Initial state
    :param seed: Random number seed (used in gain simulations)
    :param context: Imaging context: '2d'|'timeslice'|'wstack'
    :param nworkers: Number of dask workers to use
    :param threads_per_worker: Number of threads per worker
    :param processes: Use processes instead of threads 'processes'|'threads'
    :param order: See create_simulate_vis_graph
    :param nfreqwin: See create_simulate_vis_graph
    :param ntimes: See create_simulate_vis_graph
    :param rmax: See create_simulate_vis_graph
    :param facets: Number of facets to use
    :param wprojection_planes: Number of wprojection planes to use
    :return: results dictionary
    """

    def check_workers(client, nworkers_initial):
        nworkers_final = len(client.scheduler_info()['workers'])
        assert nworkers_final == nworkers_initial, \
            "Started %d workers, only %d at end" % (nworkers_initial, nworkers_final)

    numpy.random.seed(seed)
    results['seed'] = seed

    start_all = time.time()

    results['context'] = context
    results['hostname'] = socket.gethostname()
    results['git_hash'] = git_hash()
    results['epoch'] = time.strftime("%Y-%m-%d %H:%M:%S")

    zerow = False
    print("Context is %s" % context)

    results['nworkers'] = nworkers
    results['threads_per_worker'] = threads_per_worker
    results['processes'] = processes
    results['order'] = order
    results['nfreqwin'] = nfreqwin
    results['ntimes'] = ntimes
    results['rmax'] = rmax
    results['facets'] = facets
    results['wprojection_planes'] = wprojection_planes

    print("At start, configuration is {0!r}".format(results))

    # Parameters determining scale
    frequency = numpy.linspace(0.8e8, 1.2e8, nfreqwin)
    if nfreqwin > 1:
        channel_bandwidth = numpy.array(nfreqwin * [frequency[1] - frequency[0]])
    else:
        channel_bandwidth = numpy.array([1e6])
    times = numpy.linspace(-numpy.pi / 3.0, numpy.pi / 3.0, ntimes)

    phasecentre = SkyCoord(ra=+30.0 * u.deg, dec=-60.0 * u.deg, frame='icrs',
                           equinox='J2000')

    vis_graph_list = create_simulate_vis_graph('LOWBD2',
                                               frequency=frequency,
                                               channel_bandwidth=channel_bandwidth,
                                               times=times,
                                               phasecentre=phasecentre,
                                               order=order,
                                               format='blockvis',
                                               rmax=rmax)

    client = get_dask_Client(n_workers=nworkers,
                             threads_per_worker=threads_per_worker,
                             processes=processes)
    nworkers_initial = len(client.scheduler_info()['workers'])
    check_workers(client, nworkers_initial)
    results['nnodes'] = len(numpy.unique(findNodes(client)))
    print("Defined %d workers on %d nodes" % (nworkers, results['nnodes']))

    print("****** Visibility creation ******")
    vis_graph_list = compute_list(client, vis_graph_list)
    print("After creating vis_graph_list", client)

    # Find the best imaging parameters.
    wprojection_planes = 1
    advice = advise_wide_field(convert_blockvisibility_to_visibility(vis_graph_list[0]),
                               guard_band_image=6.0,
                               delA=0.02,
                               facets=facets,
                               wprojection_planes=wprojection_planes,
                               oversampling_synthesised_beam=4.0)

    kernel = advice['kernel']
    npixel = advice['npixels2']
    cellsize = advice['cellsize']

    if context == 'timeslice':
        vis_slices = ntimes
    elif context == '2d':
        vis_slices = 1
        kernel = '2d'
    else:
        vis_slices = advice['vis_slices']

    results['vis_slices'] = vis_slices
    results['cellsize'] = cellsize
    results['npixel'] = npixel

    gleam_model_graph = [
        delayed(create_low_test_image_from_gleam)(npixel=npixel,
                                                  frequency=[frequency[f]],
                                                  channel_bandwidth=[channel_bandwidth[f]],
                                                  cellsize=cellsize,
                                                  phasecentre=phasecentre,
                                                  polarisation_frame=PolarisationFrame("stokesI"),
                                                  flux_limit=0.1,
                                                  applybeam=True)
        for f, freq in enumerate(frequency)]

    start = time.time()
    print("****** Starting GLEAM model creation ******")
    gleam_model_graph = compute_list(client, gleam_model_graph)
    cmodel = smooth_image(gleam_model_graph[0])
    export_image_to_fits(cmodel, "pipelines-timings-delayed-gleam_cmodel.fits")
    end = time.time()
    results['time create gleam'] = end - start
    print("Creating GLEAM model took %.2f seconds" % (end - start))

    vis_graph_list = create_predict_graph(vis_graph_list, gleam_model_graph,
                                          vis_slices=51, context=context, kernel=kernel)
    start = time.time()
    print("****** Starting GLEAM model visibility prediction ******")
    vis_graph_list = compute_list(client, vis_graph_list)
    end = time.time()
    results['time predict'] = end - start
    print("After prediction", client)
    print("GLEAM model Visibility prediction took %.2f seconds" % (end - start))

    # Corrupt the visibility for the GLEAM model
    print("****** Visibility corruption ******")
    vis_graph_list = create_corrupt_vis_graph(vis_graph_list, phase_error=1.0)
    start = time.time()
    vis_graph_list = compute_list(client, vis_graph_list)
    end = time.time()
    results['time corrupt'] = end - start
    print("After corrupt", client)
    print("Visibility corruption took %.2f seconds" % (end - start))

    # Create an empty model image
    model_graph = [
        delayed(create_image_from_visibility)(vis_graph_list[f],
                                              npixel=npixel,
                                              cellsize=cellsize,
                                              frequency=[frequency[f]],
                                              channel_bandwidth=[channel_bandwidth[f]],
                                              polarisation_frame=PolarisationFrame("stokesI"))
        for f, freq in enumerate(frequency)]
    model_graph = client.compute(model_graph, sync=True)

    psf_graph = create_invert_graph(vis_graph_list, model_graph,
                                    vis_slices=vis_slices,
                                    context=context,
                                    facets=facets,
                                    dopsf=True,
                                    kernel=kernel)
    start = time.time()
    print("****** Starting PSF calculation ******")
    psf, sumwt = client.compute(psf_graph, sync=True)[0]
    check_workers(client, nworkers_initial)
    end = time.time()
    results['time psf invert'] = end - start
    print("PSF invert took %.2f seconds" % (end - start))
    print("After psf", client)

    results['psf_max'] = qa_image(psf).data['max']
    results['psf_min'] = qa_image(psf).data['min']

    dirty_graph = create_invert_graph(vis_graph_list, model_graph,
                                      vis_slices=vis_slices,
                                      context=context,
                                      facets=facets,
                                      kernel=kernel)
    start = time.time()
    print("****** Starting dirty image calculation ******")
    dirty, sumwt = client.compute(dirty_graph, sync=True)[0]
    check_workers(client, nworkers_initial)
    end = time.time()
    print("After dirty image", client)
    results['time invert'] = end - start
    print("Dirty image invert took %.2f seconds" % (end - start))
    print("Maximum in dirty image is ", numpy.max(numpy.abs(dirty.data)),
          ", sumwt is ", sumwt)
    qa = qa_image(dirty)
    results['dirty_max'] = qa.data['max']
    results['dirty_min'] = qa.data['min']

    # Create the ICAL pipeline to run 5 major cycles, starting selfcal at cycle 1.
    # A global solution across all frequencies (i.e. Visibilities) is performed.
    print("****** Starting ICAL ******")
    start = time.time()
    ical_graph = create_ical_pipeline_graph(vis_graph_list,
                                            model_graph=model_graph,
                                            context=context,
                                            do_selfcal=1,
                                            nchan=nfreqwin,
                                            vis_slices=vis_slices,
                                            algorithm='mmclean',
                                            nmoments=3,
                                            niter=1000,
                                            fractional_threshold=0.1,
                                            scales=[0, 3, 10],
                                            threshold=0.1,
                                            nmajor=5,
                                            gain=0.7,
                                            timeslice='auto',
                                            global_solution=True,
                                            window_shape='quarter')
    end = time.time()
    results['time ICAL graph'] = end - start
    print("Construction of ICAL graph took %.2f seconds" % (end - start))

    # Execute the graph
    start = time.time()
    result = client.compute(ical_graph, sync=True)
    deconvolved, residual, restored = result
    check_workers(client, nworkers_initial)
    end = time.time()
    print("After ICAL", client)
    results['time ICAL'] = end - start
    print("ICAL graph execution took %.2f seconds" % (end - start))

    qa = qa_image(deconvolved[0])
    results['deconvolved_max'] = qa.data['max']
    results['deconvolved_min'] = qa.data['min']
    export_image_to_fits(deconvolved[0], "pipelines-timings-delayed-ical_deconvolved.fits")

    qa = qa_image(residual[0][0])
    results['residual_max'] = qa.data['max']
    results['residual_min'] = qa.data['min']
    export_image_to_fits(residual[0][0], "pipelines-timings-delayed-ical_residual.fits")

    qa = qa_image(restored[0])
    results['restored_max'] = qa.data['max']
    results['restored_min'] = qa.data['min']
    export_image_to_fits(restored[0], "pipelines-timings-delayed-ical_restored.fits")

    # client.shutdown()

    end_all = time.time()
    results['time overall'] = end_all - start_all

    print("At end, results are {0!r}".format(results))

    return results
def create_ical_pipeline_graph(vis_graph_list, model_graph: delayed, context='2d',
                               do_selfcal=True, **kwargs) -> delayed:
    """Create graph for ICAL pipeline

    :param vis_graph_list:
    :param model_graph:
    :param context: imaging context e.g. '2d'
    :param kwargs: Parameters for functions in graphs
    :return:
    """
    psf_graph = create_invert_graph(vis_graph_list, model_graph, dopsf=True,
                                    context=context, **kwargs)

    model_vis_graph_list = create_zero_vis_graph_list(vis_graph_list)
    model_vis_graph_list = create_predict_graph(model_vis_graph_list, model_graph,
                                                context=context, **kwargs)
    if do_selfcal:
        # Make the predicted visibilities, selfcalibrate against them correcting the
        # gains, then form the residual visibility, then make the residual image
        vis_graph_list = create_calibrate_graph_list(vis_graph_list,
                                                     model_vis_graph_list, **kwargs)
        residual_vis_graph_list = create_subtract_vis_graph_list(vis_graph_list,
                                                                 model_vis_graph_list)
        residual_graph = create_invert_graph(residual_vis_graph_list, model_graph,
                                             dopsf=False, context=context, **kwargs)
    else:
        # If we are not selfcalibrating it's much easier and we can avoid an unnecessary
        # round of gather/scatter for visibility partitioning such as timeslices and wstack.
        residual_graph = create_residual_graph(vis_graph_list, model_graph,
                                               context=context, **kwargs)

    deconvolve_model_graph = create_deconvolve_graph(residual_graph, psf_graph,
                                                     model_graph, **kwargs)

    nmajor = get_parameter(kwargs, "nmajor", 5)
    if nmajor > 1:
        for cycle in range(nmajor):
            if do_selfcal:
                model_vis_graph_list = create_zero_vis_graph_list(vis_graph_list)
                model_vis_graph_list = create_predict_graph(model_vis_graph_list,
                                                            deconvolve_model_graph,
                                                            context=context, **kwargs)
                vis_graph_list = create_calibrate_graph_list(vis_graph_list,
                                                             model_vis_graph_list,
                                                             **kwargs)
                residual_vis_graph_list = create_subtract_vis_graph_list(vis_graph_list,
                                                                         model_vis_graph_list)
                residual_graph = create_invert_graph(residual_vis_graph_list,
                                                     model_graph, dopsf=False,
                                                     context=context, **kwargs)
            else:
                residual_graph = create_residual_graph(vis_graph_list,
                                                       deconvolve_model_graph,
                                                       context=context, **kwargs)

            deconvolve_model_graph = create_deconvolve_graph(residual_graph, psf_graph,
                                                             deconvolve_model_graph,
                                                             **kwargs)

    residual_graph = create_residual_graph(vis_graph_list, deconvolve_model_graph,
                                           context=context, **kwargs)
    restore_graph = create_restore_graph(deconvolve_model_graph, psf_graph,
                                         residual_graph)
    return delayed((deconvolve_model_graph, residual_graph, restore_graph))
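# A minimal usage sketch mirroring the call in trial_case above; the parameter
# values and output file name are illustrative, not defaults. The returned delayed
# object evaluates to the tuple (deconvolved model, residual, restored image).
ical_graph = create_ical_pipeline_graph(vis_graph_list,
                                        model_graph=model_graph,
                                        context='wstack',
                                        do_selfcal=True,
                                        vis_slices=51,
                                        niter=1000,
                                        fractional_threshold=0.1,
                                        scales=[0, 3, 10],
                                        threshold=0.1,
                                        nmajor=5,
                                        gain=0.7)
deconvolved, residual, restored = ical_graph.compute()
export_image_to_fits(restored[0], 'ical_restored.fits')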