def test_deconvolve_spectral(self):
    """Invert the corrupted visibilities with the 2d context, deconvolve the
    resulting spectral cube, and export the first channel to FITS."""
    self.actualSetUp(add_errors=True)

    # Dirty image and PSF graphs, one per frequency window.
    dirty_list = invert_list_arlexecute_workflow(self.vis_list, self.model_imagelist,
                                                 context='2d', dopsf=False, normalize=True)
    psf_list = invert_list_arlexecute_workflow(self.vis_list, self.model_imagelist,
                                               context='2d', dopsf=True, normalize=True)
    dirty_list = arlexecute.persist(dirty_list)
    psf_list = arlexecute.persist(psf_list)

    # Multi-scale clean of each channel.
    clean_list = deconvolve_list_arlexecute_workflow(dirty_list, psf_list,
                                                     self.model_imagelist,
                                                     niter=1000,
                                                     fractional_threshold=0.1,
                                                     scales=[0, 3, 10],
                                                     threshold=0.1,
                                                     gain=0.7)
    clean_list = arlexecute.persist(clean_list)
    clean_list = arlexecute.compute(clean_list, sync=True)

    export_image_to_fits(clean_list[0],
                         '%s/test_imaging_%s_deconvolve_spectral.fits' %
                         (self.dir, arlexecute.type()))
def test_deconvolve_and_restore_cube_mmclean_facets(self):
    """Faceted multi-moment clean of the corrupted cube, followed by residual
    calculation and restoration; the restored first channel is exported."""
    self.actualSetUp(add_errors=True)

    # Per-channel dirty images and PSFs with the 2d context.
    dirty_list = invert_list_arlexecute_workflow(self.vis_list, self.model_imagelist,
                                                 context='2d', dopsf=False, normalize=True)
    psf_list = invert_list_arlexecute_workflow(self.vis_list, self.model_imagelist,
                                               context='2d', dopsf=True, normalize=True)
    dirty_list = arlexecute.persist(dirty_list)
    psf_list = arlexecute.persist(psf_list)

    # mmclean over 8x8 facets with an 8-pixel tukey-tapered overlap.
    clean_list = deconvolve_list_arlexecute_workflow(dirty_list, psf_list,
                                                     self.model_imagelist,
                                                     niter=1000,
                                                     fractional_threshold=0.1,
                                                     scales=[0, 3, 10],
                                                     algorithm='mmclean',
                                                     nmoment=3,
                                                     nchan=self.freqwin,
                                                     threshold=0.01,
                                                     gain=0.7,
                                                     deconvolve_facets=8,
                                                     deconvolve_overlap=8,
                                                     deconvolve_taper='tukey')
    clean_list = arlexecute.persist(clean_list)

    residual_list = residual_list_arlexecute_workflow(self.vis_list,
                                                      model_imagelist=clean_list,
                                                      context='2d')
    residual_list = arlexecute.persist(residual_list)

    restored_graph = restore_list_arlexecute_workflow(model_imagelist=clean_list,
                                                      psf_imagelist=psf_list,
                                                      residual_imagelist=residual_list,
                                                      empty=self.model_imagelist)
    restored = arlexecute.compute(restored_graph, sync=True)[0]

    export_image_to_fits(restored,
                         '%s/test_imaging_%s_overlap_mmclean_restored.fits' %
                         (self.dir, arlexecute.type()))
def weight_list_arlexecute_workflow(vis_list, model_imagelist, gcfcf=None, weighting='uniform', **kwargs):
    """ Weight the visibility data

    The weighting is collective: per-visibility weight grids are merged across
    the whole vis_list and the summed grid is then applied back to every
    visibility.

    :param vis_list:
    :param model_imagelist: Model required to determine weighting parameters
    :param gcfcf: Optional list holding a (gcf, cf) convolution-function pair
    :param weighting: Type of weighting
    :param kwargs: Parameters for functions in graphs
    :return: List of vis_graphs
    """
    centre = len(model_imagelist) // 2
    if gcfcf is None:
        gcfcf = [arlexecute.execute(create_pswf_convolutionfunction)(model_imagelist[centre])]

    def grid_wt(vis, model, g):
        # Grid the weights of one visibility onto a griddata made from the model.
        if vis is None or model is None:
            return None
        griddata = create_griddata_from_image(model)
        return grid_weight_to_griddata(vis, griddata, g[0][1])

    weight_list = [arlexecute.execute(grid_wt, pure=True, nout=1)(v, model_imagelist[i], gcfcf)
                   for i, v in enumerate(vis_list)]

    # Sum the weight grids over all visibilities and broadcast the result.
    merged_weight_grid = arlexecute.execute(griddata_merge_weights, nout=1)(weight_list)
    merged_weight_grid = arlexecute.persist(merged_weight_grid, broadcast=True)

    def re_weight(vis, model, gd, g):
        # Apply the merged weight grid back to one visibility.
        if gd is None:
            return vis
        if vis is None:
            return None
        # Ensure that the griddata has the right axes so that the convolution
        # function mapping works
        agd = create_griddata_from_image(model)
        agd.data = gd[0].data
        return griddata_reweight(vis, agd, g[0][1])

    reweighted = [arlexecute.execute(re_weight, nout=1)(v, model_imagelist[i],
                                                        merged_weight_grid, gcfcf)
                  for i, v in enumerate(vis_list)]
    return arlexecute.optimize(reweighted)
# Scale of the simulation: LOWBD2 stations out to 300 m, channels across 90-110 MHz.
# NOTE(review): nfreqwin and ntimes must be defined earlier in the file — confirm.
rmax = 300.0
frequency = numpy.linspace(0.9e8, 1.1e8, nfreqwin)
channel_bandwidth = numpy.array(nfreqwin * [frequency[1] - frequency[0]])
times = numpy.linspace(-numpy.pi / 3.0, numpy.pi / 3.0, ntimes)
phasecentre = SkyCoord(ra=+30.0 * u.deg, dec=-60.0 * u.deg, frame='icrs', equinox='J2000')
# Build and evaluate the simulated visibility graph, one element per frequency window.
vis_list = simulate_list_arlexecute_workflow('LOWBD2', rmax=rmax, frequency=frequency,
                                             channel_bandwidth=channel_bandwidth,
                                             times=times, phasecentre=phasecentre,
                                             order='frequency')
print('%d elements in vis_list' % len(vis_list))
print('About to make visibility')
vis_list = arlexecute.persist(vis_list)
vis_list = arlexecute.compute(vis_list, sync=True)
print(vis_list[0])
# Get imaging advice at the lowest and highest frequencies and combine conservatively:
# slices from the low end, image size from the high end, the smaller cellsize of the two.
advice_low = advise_wide_field(vis_list[0], guard_band_image=8.0, delA=0.02,
                               wprojection_planes=1)
advice_high = advise_wide_field(vis_list[-1], guard_band_image=8.0, delA=0.02,
                                wprojection_planes=1)
vis_slices = advice_low['vis_slices']
npixel = advice_high['npixels2']
cellsize = min(advice_low['cellsize'], advice_high['cellsize'])
def actualSetUp(self, add_errors=False, freqwin=7, block=False, dospectral=True,
                dopol=False, zerow=True):
    """Build the per-channel visibility, model and component lists used by the tests.

    Note that errors are inserted only when both add_errors and block are True.
    """
    self.npixel = 256
    self.low = create_named_configuration('LOWBD2', rmax=750.0)
    self.freqwin = freqwin
    self.vis_list = list()
    self.ntimes = 5
    cellsize = 0.001
    self.times = numpy.linspace(-3.0, +3.0, self.ntimes) * numpy.pi / 12.0
    self.frequency = numpy.linspace(0.8e8, 1.2e8, self.freqwin)

    if freqwin > 1:
        self.channelwidth = numpy.array(freqwin * [self.frequency[1] - self.frequency[0]])
    else:
        self.channelwidth = numpy.array([1e6])

    # Polarisation frames and a reference flux for each case.
    if dopol:
        self.vis_pol = PolarisationFrame('linear')
        self.image_pol = PolarisationFrame('stokesIQUV')
        f = numpy.array([100.0, 20.0, -10.0, 1.0])
    else:
        self.vis_pol = PolarisationFrame('stokesI')
        self.image_pol = PolarisationFrame('stokesI')
        f = numpy.array([100.0])

    # Optionally scale the flux with a -0.7 spectral index across the band.
    if dospectral:
        flux = numpy.array([f * numpy.power(freq / 1e8, -0.7) for freq in self.frequency])
    else:
        flux = numpy.array([f])

    self.phasecentre = SkyCoord(ra=+180.0 * u.deg, dec=-60.0 * u.deg,
                                frame='icrs', equinox='J2000')

    # One visibility graph per channel; `chan` is used to avoid shadowing the
    # freqwin parameter.
    self.vis_list = [
        arlexecute.execute(ingest_unittest_visibility)(
            self.low, [self.frequency[chan]], [self.channelwidth[chan]], self.times,
            self.vis_pol, self.phasecentre, block=block, zerow=zerow)
        for chan, _ in enumerate(self.frequency)
    ]

    # Empty model image per channel.
    self.model_imagelist = [
        arlexecute.execute(create_unittest_model, nout=chan)(self.vis_list[chan],
                                                             self.image_pol,
                                                             cellsize=cellsize,
                                                             npixel=self.npixel)
        for chan, _ in enumerate(self.frequency)
    ]

    # Test components per channel, then insert them into the models.
    self.componentlist = [
        arlexecute.execute(create_unittest_components)(
            self.model_imagelist[chan], flux[chan, :][numpy.newaxis, :])
        for chan, _ in enumerate(self.frequency)
    ]
    self.model_imagelist = [
        arlexecute.execute(insert_skycomponent, nout=1)(self.model_imagelist[chan],
                                                        self.componentlist[chan])
        for chan, _ in enumerate(self.frequency)
    ]

    # Predict the component visibilities.
    self.vis_list = [
        arlexecute.execute(predict_skycomponent_visibility)(self.vis_list[chan],
                                                            self.componentlist[chan])
        for chan, _ in enumerate(self.frequency)
    ]

    # Calculate the model convolved with a Gaussian.
    self.model_imagelist = arlexecute.compute(self.model_imagelist, sync=True)
    model = self.model_imagelist[0]
    self.cmodel = smooth_image(model)
    export_image_to_fits(model,
                         '%s/test_imaging_arlexecute_deconvolved_model.fits' % self.dir)
    export_image_to_fits(self.cmodel,
                         '%s/test_imaging_arlexecute_deconvolved_cmodel.fits' % self.dir)

    if add_errors and block:
        self.vis_list = [
            arlexecute.execute(insert_unittest_errors)(self.vis_list[i])
            for i, _ in enumerate(self.frequency)
        ]

    # self.vis_list = arlexecute.compute(self.vis_list, sync=True)
    self.vis_list = arlexecute.persist(self.vis_list)
    self.model_imagelist = arlexecute.scatter(self.model_imagelist)
phasecentre = SkyCoord(ra=+30.0 * u.deg, dec=-60.0 * u.deg, frame='icrs', equinox='J2000')
# Simulated LOWBD2 visibilities, one list element per frequency window.
# NOTE(review): rmax, frequency, channel_bandwidth, times come from earlier in the file.
vis_list = simulate_list_arlexecute_workflow('LOWBD2',
                                             rmax=rmax,
                                             frequency=frequency,
                                             channel_bandwidth=channel_bandwidth,
                                             times=times,
                                             phasecentre=phasecentre,
                                             order='frequency')
print('%d elements in vis_list' % len(vis_list))
print('About to make visibility')
vis_list = arlexecute.persist(vis_list)
# The vis data are on the workers so we run the advice function on the workers
# without transfering the data back to the host.
advice_list = [
    arlexecute.execute(advise_wide_field)(v, guard_band_image=8.0, delA=0.02,
                                          wprojection_planes=1)
    for _, v in enumerate(vis_list)
]
advice_list = arlexecute.compute(advice_list, sync=True)
advice_low = advice_list[0]
advice_high = advice_list[-1]
# NOTE(review): `centre` is defined elsewhere in the file — confirm it indexes this list.
print(advice_list[centre])
arlexecute.run(init_logging) # Load data from previous simulation vislist = import_blockvisibility_from_hdf5('gleam_simulation_vislist.hdf') ntimes = len(vislist[0].time) cellsize = 0.001 npixel = 1024 pol_frame = PolarisationFrame("stokesI") model_list = [ arlexecute.execute(create_image_from_visibility)( v, npixel=1024, cellsize=cellsize, polarisation_frame=pol_frame) for v in vislist ] model_list = arlexecute.persist(model_list) controls = create_calibration_controls() controls['T']['first_selfcal'] = 1 controls['G']['first_selfcal'] = 3 controls['B']['first_selfcal'] = 4 controls['T']['timescale'] = 'auto' controls['G']['timescale'] = 'auto' controls['B']['timescale'] = 1e5 pp.pprint(controls) vislist = arlexecute.scatter(vislist) ical_list = ical_list_arlexecute_workflow(vislist,
def trial_case(results, seed=180555, context='wstack', nworkers=8, threads_per_worker=1,
               memory=8, processes=True, order='frequency', nfreqwin=7, ntimes=3,
               rmax=750.0, facets=1, wprojection_planes=1, use_dask=True,
               use_serial=False):
    """ Single trial for performance-timings

    Simulates visibilities from GLEAM including phase errors
    Makes dirty image and PSF
    Runs ICAL pipeline

    The results are in a dictionary:

    'context': input - a string describing concisely the purpose of the test
    'time overall', overall execution time (s)
    'time create gleam', time to create GLEAM prediction graph
    'time predict', time to execute GLEAM prediction graph
    'time corrupt', time to corrupt data_models
    'time invert', time to make dirty image
    'time psf invert', time to make PSF
    'time ICAL graph', time to create ICAL graph
    'time ICAL', time to execute ICAL graph
    'context', type of imaging e.g. 'wstack'
    'nworkers', number of workers to create
    'threads_per_worker',
    'nnodes', Number of nodes,
    'processes',
    'order', Ordering of data_models
    'nfreqwin', Number of frequency windows in simulation
    'ntimes', Number of hour angles in simulation
    'rmax', Maximum radius of stations used in simulation (m)
    'facets', Number of facets in deconvolution and imaging
    'wprojection_planes', Number of wprojection planes
    'vis_slices', Number of visibility slices (per Visibility)
    'npixel', Number of pixels in image
    'cellsize', Cellsize in radians
    'seed', Random number seed
    'dirty_max', Maximum in dirty image
    'dirty_min', Minimum in dirty image
    'psf_max',
    'psf_min',
    'restored_max',
    'restored_min',
    'deconvolved_max',
    'deconvolved_min',
    'residual_max',
    'residual_min',
    'git_info', GIT hash (not definitive since local mods are possible)

    :param results: Initial state
    :param seed: Random number seed (used in gain simulations)
    :param context: Type of imaging context: '2d'|'timeslice'|'wstack'
    :param nworkers: Number of dask workers to use
    :param threads_per_worker: Number of threads per worker
    :param processes: Use processes instead of threads 'processes'|'threads'
    :param order: See simulate_list_list_arlexecute_workflow_workflowkflow
    :param nfreqwin: See simulate_list_list_arlexecute_workflow_workflowkflow
    :param ntimes: See simulate_list_list_arlexecute_workflow_workflowkflow
    :param rmax: See simulate_list_list_arlexecute_workflow_workflowkflow
    :param facets: Number of facets to use
    :param wprojection_planes: Number of wprojection planes to use
    :param use_dask: Use dask or immediate evaluation
    :param use_serial: NOTE(review): accepted but never read in this body — confirm intent
    :return: results dictionary
    """
    numpy.random.seed(seed)
    results['seed'] = seed

    start_all = time.time()

    # Record the run configuration before any work is done.
    results['context'] = context
    results['hostname'] = socket.gethostname()
    results['git_hash'] = git_hash()
    results['epoch'] = time.strftime("%Y-%m-%d %H:%M:%S")

    zerow = False
    print("Context is %s" % context)

    results['nworkers'] = nworkers
    results['threads_per_worker'] = threads_per_worker
    results['processes'] = processes
    results['memory'] = memory
    results['order'] = order
    results['nfreqwin'] = nfreqwin
    results['ntimes'] = ntimes
    results['rmax'] = rmax
    results['facets'] = facets
    results['wprojection_planes'] = wprojection_planes
    results['use_dask'] = use_dask

    print("At start, configuration is {0!r}".format(results))

    # Parameters determining scale
    frequency = numpy.linspace(0.8e8, 1.2e8, nfreqwin)
    centre = nfreqwin // 2
    if nfreqwin > 1:
        channel_bandwidth = numpy.array(nfreqwin * [frequency[1] - frequency[0]])
    else:
        channel_bandwidth = numpy.array([1e6])
    times = numpy.linspace(-numpy.pi / 3.0, numpy.pi / 3.0, ntimes)

    phasecentre = SkyCoord(ra=+30.0 * u.deg, dec=-60.0 * u.deg,
                           frame='icrs', equinox='J2000')

    if use_dask:
        # memory is given in GB per worker.
        client = get_dask_Client(threads_per_worker=threads_per_worker,
                                 memory_limit=memory * 1024 * 1024 * 1024,
                                 n_workers=nworkers)
        arlexecute.set_client(client)
        nodes = findNodes(arlexecute.client)
        unodes = list(numpy.unique(nodes))
        results['nnodes'] = len(unodes)
        print("Defined %d workers on %d nodes" % (nworkers, results['nnodes']))
        print("Workers are: %s" % str(nodes))
    else:
        arlexecute.set_client(use_dask=use_dask)
        results['nnodes'] = 1
        unodes = None

    vis_list = simulate_list_arlexecute_workflow('LOWBD2',
                                                 frequency=frequency,
                                                 channel_bandwidth=channel_bandwidth,
                                                 times=times,
                                                 phasecentre=phasecentre,
                                                 order=order,
                                                 format='blockvis',
                                                 rmax=rmax)

    print("****** Visibility creation ******")
    vis_list = arlexecute.persist(vis_list)

    # Find the best imaging parameters but don't bring the vis_list back here
    def get_wf(bv):
        v = convert_blockvisibility_to_visibility(bv)
        return advise_wide_field(v, guard_band_image=6.0, delA=0.02, facets=facets,
                                 wprojection_planes=wprojection_planes,
                                 oversampling_synthesised_beam=4.0)

    # NOTE(review): get_wf is a late-binding closure, so this rebinding means the
    # advice above always sees wprojection_planes == 1 regardless of the
    # parameter value — confirm this is intended.
    wprojection_planes = 1
    advice = arlexecute.compute(arlexecute.execute(get_wf)(vis_list[0]), sync=True)

    npixel = advice['npixels2']
    cellsize = advice['cellsize']

    # Any unrecognised context falls through to 'wstack'.
    if context == 'timeslice':
        vis_slices = ntimes
        print("Using timeslice with %d slices" % vis_slices)
    elif context == '2d':
        vis_slices = 1
    else:
        context = 'wstack'
        vis_slices = 5 * advice['vis_slices']
        print("Using wstack with %d slices" % vis_slices)

    results['vis_slices'] = vis_slices
    results['cellsize'] = cellsize
    results['npixel'] = npixel

    gleam_model_list = [
        arlexecute.execute(create_low_test_image_from_gleam)(
            npixel=npixel,
            frequency=[frequency[f]],
            channel_bandwidth=[channel_bandwidth[f]],
            cellsize=cellsize,
            phasecentre=phasecentre,
            polarisation_frame=PolarisationFrame("stokesI"),
            flux_limit=0.3,
            applybeam=True)
        for f, freq in enumerate(frequency)
    ]

    start = time.time()
    print("****** Starting GLEAM model creation ******")
    gleam_model_list = arlexecute.compute(gleam_model_list, sync=True)
    cmodel = smooth_image(gleam_model_list[centre])
    export_image_to_fits(cmodel, "pipelines-timings-arlexecute-gleam_cmodel.fits")
    end = time.time()
    results['time create gleam'] = end - start
    print("Creating GLEAM model took %.2f seconds" % (end - start))

    gleam_model_list = arlexecute.scatter(gleam_model_list)
    vis_list = predict_list_arlexecute_workflow(vis_list, gleam_model_list,
                                                vis_slices=vis_slices, context=context)
    start = time.time()
    print("****** Starting GLEAM model visibility prediction ******")
    vis_list = arlexecute.compute(vis_list, sync=True)
    end = time.time()
    results['time predict'] = end - start
    print("GLEAM model Visibility prediction took %.2f seconds" % (end - start))

    # Corrupt the visibility for the GLEAM model
    print("****** Visibility corruption ******")
    vis_list = corrupt_list_arlexecute_workflow(vis_list, phase_error=1.0)
    start = time.time()
    vis_list = arlexecute.compute(vis_list, sync=True)
    vis_list = arlexecute.scatter(vis_list)
    end = time.time()
    results['time corrupt'] = end - start
    print("Visibility corruption took %.2f seconds" % (end - start))

    # Create an empty model image
    model_list = [
        arlexecute.execute(create_image_from_visibility)(
            vis_list[f],
            npixel=npixel,
            cellsize=cellsize,
            frequency=[frequency[f]],
            channel_bandwidth=[channel_bandwidth[f]],
            polarisation_frame=PolarisationFrame("stokesI"))
        for f, freq in enumerate(frequency)
    ]
    model_list = arlexecute.compute(model_list, sync=True)
    model_list = arlexecute.scatter(model_list)

    psf_list = invert_list_arlexecute_workflow(vis_list, model_list,
                                               vis_slices=vis_slices,
                                               context=context, facets=facets,
                                               dopsf=True)
    start = time.time()
    print("****** Starting PSF calculation ******")
    psf, sumwt = arlexecute.compute(psf_list, sync=True)[centre]
    end = time.time()
    results['time psf invert'] = end - start
    print("PSF invert took %.2f seconds" % (end - start))
    results['psf_max'] = qa_image(psf).data['max']
    results['psf_min'] = qa_image(psf).data['min']

    dirty_list = invert_list_arlexecute_workflow(vis_list, model_list,
                                                 vis_slices=vis_slices,
                                                 context=context, facets=facets)
    start = time.time()
    print("****** Starting dirty image calculation ******")
    dirty, sumwt = arlexecute.compute(dirty_list, sync=True)[centre]
    end = time.time()
    results['time invert'] = end - start
    print("Dirty image invert took %.2f seconds" % (end - start))
    print("Maximum in dirty image is ", numpy.max(numpy.abs(dirty.data)),
          ", sumwt is ", sumwt)
    qa = qa_image(dirty)
    results['dirty_max'] = qa.data['max']
    results['dirty_min'] = qa.data['min']

    # Create the ICAL pipeline to run 5 major cycles, starting selfcal at cycle 1.
    # A global solution across all frequencies (i.e. Visibilities) is performed.
    start = time.time()
    print("****** Starting ICAL ******")

    controls = create_calibration_controls()

    controls['T']['first_selfcal'] = 1
    controls['G']['first_selfcal'] = 3
    controls['B']['first_selfcal'] = 4

    controls['T']['timescale'] = 'auto'
    controls['G']['timescale'] = 'auto'
    controls['B']['timescale'] = 1e5

    # Moment count and clean algorithm scale with the number of channels.
    if nfreqwin > 6:
        nmoment = 3
        algorithm = 'mmclean'
    elif nfreqwin > 2:
        nmoment = 2
        algorithm = 'mmclean'
    else:
        nmoment = 1
        algorithm = 'msclean'

    start = time.time()
    ical_list = ical_list_arlexecute_workflow(vis_list,
                                              model_imagelist=model_list,
                                              context='wstack',
                                              calibration_context='TG',
                                              controls=controls,
                                              scales=[0, 3, 10],
                                              algorithm=algorithm,
                                              nmoment=nmoment,
                                              niter=1000,
                                              fractional_threshold=0.1,
                                              threshold=0.1,
                                              nmajor=5,
                                              gain=0.25,
                                              vis_slices=vis_slices,
                                              timeslice='auto',
                                              global_solution=False,
                                              psf_support=64,
                                              do_selfcal=True)
    end = time.time()
    results['time ICAL graph'] = end - start
    print("Construction of ICAL graph took %.2f seconds" % (end - start))

    # Execute the graph
    start = time.time()
    result = arlexecute.compute(ical_list, sync=True)
    deconvolved, residual, restored = result
    end = time.time()
    results['time ICAL'] = end - start
    print("ICAL graph execution took %.2f seconds" % (end - start))

    qa = qa_image(deconvolved[centre])
    results['deconvolved_max'] = qa.data['max']
    results['deconvolved_min'] = qa.data['min']
    export_image_to_fits(deconvolved[centre],
                         "pipelines-timings-arlexecute-ical_deconvolved.fits")

    qa = qa_image(residual[centre][0])
    results['residual_max'] = qa.data['max']
    results['residual_min'] = qa.data['min']
    export_image_to_fits(residual[centre][0],
                         "pipelines-timings-arlexecute-ical_residual.fits")

    qa = qa_image(restored[centre])
    results['restored_max'] = qa.data['max']
    results['restored_min'] = qa.data['min']
    export_image_to_fits(restored[centre],
                         "pipelines-timings-arlexecute-ical_restored.fits")

    # arlexecute.close()

    end_all = time.time()
    results['time overall'] = end_all - start_all

    print("At end, results are {0!r}".format(results))

    return results
def simulation(self, args, time_series='wind', band='B2', context='singlesource',
               vp_directory=''):
    """Simulate MID observations with pointing/surface errors and return the
    residual dirty image caused by those errors.

    :param args: argparse-style namespace supplying the observation parameters
    :param time_series: '' (parametric pointing errors) | 'wind' | 'gravity'
    :param band: 'B1' | 'B2' | 'Ku'
    :param context: passed to create_simulation_components, e.g. 'singlesource'
    :param vp_directory: NOTE(review): immediately overwritten from args below
    :return: (error_dirty, sumwt)
    :raises ValueError: for an unknown band or time_series
    """
    # Unpack the command-line arguments.
    ra = args.ra
    declination = args.declination
    use_radec = args.use_radec == "True"
    integration_time = args.integration_time
    time_range = args.time_range
    time_chunk = args.time_chunk
    offset_dir = args.offset_dir
    pbtype = args.pbtype
    pbradius = args.pbradius
    rmax = args.rmax
    flux_limit = args.flux_limit
    npixel = args.npixel
    shared_directory = args.shared_directory
    vp_directory = args.vp_directory

    # Simulation specific parameters
    global_pe = numpy.array(args.global_pe)
    static_pe = numpy.array(args.static_pe)
    dynamic_pe = args.dynamic_pe

    seed = args.seed
    basename = os.path.basename(os.getcwd())

    # client = get_dask_Client()
    use_dask = False
    arlexecute.set_client(use_dask=use_dask)

    # Set up details of simulated observation
    nfreqwin = 1
    if band == 'B1':
        frequency = [0.765e9]
    elif band == 'B2':
        frequency = [1.36e9]
    elif band == 'Ku':
        frequency = [12.179e9]
    else:
        raise ValueError("Unknown band %s" % band)

    phasecentre = SkyCoord(ra=ra * u.deg, dec=declination * u.deg,
                           frame='icrs', equinox='J2000')

    bvis_graph = create_standard_mid_simulation_arlexecute_workflow(
        band, rmax, phasecentre, time_range, time_chunk, integration_time,
        shared_directory)
    future_bvis_list = arlexecute.persist(bvis_graph)
    vis_graph = [
        arlexecute.execute(convert_blockvisibility_to_visibility)(bv)
        for bv in future_bvis_list
    ]
    # NOTE(review): persist is called with sync=True here — confirm the executor
    # accepts that keyword.
    future_vis_list = arlexecute.persist(vis_graph, sync=True)

    # We need the HWHM of the primary beam, and the location of the nulls
    HWHM_deg, null_az_deg, null_el_deg = find_pb_width_null(pbtype, frequency)
    HWHM = HWHM_deg * numpy.pi / 180.0
    # Field of view scaled so the beam is sampled the same at every band.
    FOV_deg = 8.0 * 1.36e9 / frequency[0]

    advice_list = arlexecute.execute(advise_wide_field)(future_vis_list[0],
                                                        guard_band_image=1.0,
                                                        delA=0.02,
                                                        verbose=False)
    advice = arlexecute.compute(advice_list, sync=True)
    pb_npixel = 1024
    d2r = numpy.pi / 180.0
    pb_cellsize = d2r * FOV_deg / pb_npixel
    cellsize = advice['cellsize']

    # Now construct the components
    original_components, offset_direction = create_simulation_components(
        context, phasecentre, frequency, pbtype, offset_dir, flux_limit,
        pbradius * HWHM, pb_npixel, pb_cellsize)

    # Voltage patterns, one per block visibility.
    vp_list = [
        arlexecute.execute(create_image_from_visibility)(
            bv, npixel=pb_npixel, frequency=frequency, nchan=nfreqwin,
            cellsize=pb_cellsize, phasecentre=phasecentre, override_cellsize=False)
        for bv in future_bvis_list
    ]
    vp_list = [
        arlexecute.execute(create_vp)(vp, pbtype, pointingcentre=phasecentre,
                                      use_local=not use_radec)
        for vp in vp_list
    ]
    future_vp_list = arlexecute.persist(vp_list)

    # Make one image per component
    future_model_list = [
        arlexecute.execute(create_image_from_visibility)(
            future_vis_list[0], npixel=npixel, frequency=frequency, nchan=nfreqwin,
            cellsize=cellsize, phasecentre=offset_direction,
            polarisation_frame=PolarisationFrame("stokesI"))
        for i, _ in enumerate(original_components)
    ]

    # Arcseconds to radians conversion factor.
    # NOTE(review): the canonical factor is pi/(3600*180); this uses 3600*1800
    # (a factor of 10 smaller result) — confirm whether intentional.
    a2r = numpy.pi / (3600.0 * 1800)

    no_error_gtl = None
    error_gtl = None
    # Gaintables with and without the chosen error model.
    if time_series == '':
        global_pointing_error = global_pe
        static_pointing_error = static_pe
        pointing_error = dynamic_pe
        no_error_gtl, error_gtl = \
            create_pointing_errors_gaintable_arlexecute_workflow(
                future_bvis_list, original_components,
                sub_vp_list=future_vp_list,
                use_radec=use_radec,
                pointing_error=a2r * pointing_error,
                static_pointing_error=a2r * static_pointing_error,
                global_pointing_error=a2r * global_pointing_error,
                seed=seed,
                show=False,
                basename=basename)
    elif time_series == 'wind':
        no_error_gtl, error_gtl = \
            create_pointing_errors_gaintable_arlexecute_workflow(
                future_bvis_list, original_components,
                sub_vp_list=future_vp_list,
                use_radec=use_radec,
                time_series=time_series,
                time_series_type='precision',
                seed=seed,
                show=False,
                basename=basename)
    elif time_series == 'gravity':
        no_error_gtl, error_gtl = \
            create_surface_errors_gaintable_arlexecute_workflow(
                band, future_bvis_list, original_components,
                vp_directory=vp_directory,
                use_radec=use_radec,
                show=False,
                basename=basename)
    else:
        raise ValueError("Unknown type of error %s" % time_series)

    # Now make all the residual images
    vis_comp_chunk_dirty_list = \
        calculate_residual_from_gaintables_arlexecute_workflow(
            future_bvis_list, original_components, future_model_list,
            no_error_gtl, error_gtl)

    # Add the resulting images
    error_dirty_list = sum_invert_results_arlexecute(vis_comp_chunk_dirty_list)

    # Actually compute the graph assembled above
    error_dirty, sumwt = arlexecute.compute(error_dirty_list, sync=True)

    return error_dirty, sumwt
dopsf=True, normalize=True, gcgcf=gcfcf) psf_imagelist = arlexecute.compute(psf_imagelist, sync=True) # targetimage = psf_imagelist[0][0] # # print("PSF Image %s" % qa_image(targetimage, context="imaging-fits notebook, using processor %s" % context)) # export_image_to_fits(targetimage, '%s/imaging-fits_PSF_%s.fits' % (storedir, context)) dec_imagelist = deconvolve_list_arlexecute_workflow(result, psf_imagelist, targetimage_list, niter=1000, use_serial_clean=True, fractional_threshold=0.01, scales=[0, 3, 10], algorithm='msclean', gcfcf=gcfcf, threshold=0.1, gain=0.7) dec_imagelist = arlexecute.persist(dec_imagelist) dec_imagelist = arlexecute.compute(dec_imagelist, sync=True) deconvolved = dec_imagelist[0] # export_image_to_fits(deconvolved, # '%s/test_imaging_%s_deconvolved.fits' % (storedir, context)) show_image(deconvolved) plt.title(context) plt.savefig('%s/test_imaging_%s_clean.pdf' % (storedir, context)) residual_imagelist = residual_list_arlexecute_workflow(vt_list, model_imagelist=dec_imagelist, context=context) # residual_imagelist = arlexecute.persist(residual_imagelist) residual_imagelist = arlexecute.compute(residual_imagelist, sync=True) residualed = residual_imagelist[0][0]