def create_unittest_model(vis, model_pol, npixel=None, cellsize=None, nchan=1):
    advice = advise_wide_field(vis, guard_band_image=2.0, delA=0.02, facets=1,
                               wprojection_planes=1, oversampling_synthesised_beam=4.0)
    if cellsize is None:
        cellsize = advice['cellsize']
    if npixel is None:
        npixel = advice['npixels2']
    model = create_image_from_visibility(vis, npixel=npixel, cellsize=cellsize, nchan=nchan,
                                         polarisation_frame=model_pol)
    return model
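# Minimal usage sketch for create_unittest_model (illustrative only): it assumes an ARL
# Visibility object `vis` already exists; npixel and cellsize fall back to the values
# recommended by advise_wide_field when not supplied explicitly.
model = create_unittest_model(vis, PolarisationFrame('stokesI'))
model_fixed = create_unittest_model(vis, PolarisationFrame('stokesI'),
                                    npixel=1024, cellsize=1e-3, nchan=1)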
def wproject(vis, npixel_advice, cell_advice, channel, results_dir):
    """Do w-projected imaging of visibility data.

    Args:
        vis (obj): ARL visibility data.
        npixel_advice (float): number of pixels in output image.
        cell_advice (float): cellsize in output image.
        channel (int): channel number to be imaged (affects output filename).
        results_dir (str): directory to save results.

    Returns:
        dirty: dirty image.
        psf: image of psf.
    """
    try:
        vis_slices = len(np.unique(vis.time))
        print("There are %d timeslices" % vis_slices)
        # Obtain advice on w-proj parameters:
        advice = advise_wide_field(vis)
        # Create a model image:
        model = create_image_from_visibility(vis, cellsize=cell_advice, npixel=npixel_advice,
                                             polarisation_frame=PolarisationFrame('stokesIQUV'))
        # Weight the visibilities:
        vis, _, _ = weight_visibility(vis, model)
        # Create a dirty image:
        dirty, sumwt = create_invert_graph([vis], model, kernel='wprojection',
                                           wstep=advice['w_sampling_primary_beam'],
                                           oversampling=2).compute()
        # Create the psf:
        psf, sumwt = create_invert_graph([vis], model, dopsf=True, kernel='wprojection',
                                         wstep=advice['w_sampling_primary_beam'],
                                         oversampling=2).compute()
        # Save to disk:
        export_image_to_fits(dirty, '%s/imaging_dirty_WProj-%s.fits' % (results_dir, channel))
        export_image_to_fits(psf, '%s/imaging_psf_WProj-%s.fits' % (results_dir, channel))
    except:
        print("Unexpected error:", sys.exc_info()[0])
        raise
    return dirty, psf
def uv_advice(vis, uv_cutoff, pixels_per_beam):
    """Advise on the imaging parameters for fully-sampled images.

    Args:
        vis (obj): ARL visibility data.
        uv_cutoff (float): maximum intended uv-coordinate.
        pixels_per_beam (float): number of pixel samples across the beam.

    Returns:
        npixel_advice: advised number of pixels.
        cell_advice: advised cellsize.
    """
    # Find the maximum uv-distance:
    uv_dist = np.sqrt(vis.data['uvw'][:, 0]**2 + vis.data['uvw'][:, 1]**2)
    uv_max = np.max(uv_dist)
    print("Maximum uv-distance:", uv_max)
    # Calculate the angular resolution:
    print("Observing Frequency, MHz:", vis.frequency[0] / 1e6)
    lambda_meas = c.value / vis.frequency[0]
    print("")
    print("Angular resolution, FWHM:", lambda_meas / (uv_cutoff * lambda_meas))
    angres_arcmin = 60.0 * (180.0 / np.pi) * (1.0 / uv_max)
    angres_arcsec = 60.0 * 60.0 * (180.0 / np.pi) * (1.0 / uv_max)
    print("arcmin", angres_arcmin)
    print("arcsec", angres_arcsec)
    print("")
    # Calculate the cellsize:
    cell_advice = (angres_arcmin / (60.0 * pixels_per_beam)) * (np.pi / 180.0)
    # Determine the npixel size required:
    pixel_options = np.array([512, 1024, 2048, 4096, 8192])
    pb_fov = pixel_options * cell_advice * (180.0 / np.pi)
    advice = advise_wide_field(vis)
    npixel_advice = pixel_options[np.argmax(
        pb_fov > advice['primary_beam_fov'] * (180.0 / np.pi) * 2.0)]
    print("Recommended npixels/cellsize:", npixel_advice, "/", cell_advice)
    return npixel_advice, cell_advice
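# Worked example of the arithmetic in uv_advice (illustrative numbers only, not taken
# from any of the scripts here): for a hypothetical maximum uv-distance of 5000
# wavelengths sampled with 5 pixels per beam, the advised cellsize comes out at
# (1 / uv_max) / pixels_per_beam radians.
import numpy as np

uv_max = 5000.0          # hypothetical maximum uv-distance (wavelengths)
pixels_per_beam = 5.0    # hypothetical sampling across the synthesised beam

angres_rad = 1.0 / uv_max                                  # ~2.0e-4 rad
angres_arcmin = 60.0 * (180.0 / np.pi) * angres_rad        # ~0.69 arcmin
angres_arcsec = 60.0 * angres_arcmin                       # ~41.3 arcsec
cell_advice = (angres_arcmin / (60.0 * pixels_per_beam)) * (np.pi / 180.0)
print(angres_arcmin, angres_arcsec, cell_advice)           # ~0.69, ~41.3, ~4.0e-05 rad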
    phasecentre=phasecentre, rmax=rmax, zerow=True, format='vis', order=order)
log.info('rmax is %.1f (m)' % (rmax))
log.info('Observing times %s' % (times))
log.info("Observing frequencies %s Hz" % (frequency))
log.info("Number of pixels %d" % (npixel))
log.info("Cellsize = %.6f radians" % (cellsize))

vis_graph_list = compute_list(c, vis_graph_list)

advice = advise_wide_field(vis_graph_list[0], guard_band_image=4.0, delA=0.02,
                           wprojection_planes=1)
vis_slices = advice['vis_slices']
npixel = advice['npixels2']
cellsize = advice['cellsize']

future = c.compute(
    delayed(create_low_test_image_from_gleam)(
        vis_graph_list[0], npixel=npixel, nchan=1, cellsize=cellsize,
        frequency=[frequency[0]], channel_bandwidth=[channel_bandwidth[0]],
        polarisation_frame=PolarisationFrame("stokesI")))
model = future.result()
def trial_case(results, seed=180555, context='wstack', nworkers=8, threads_per_worker=1,
               processes=True, order='frequency', nfreqwin=7, ntimes=3, rmax=750.0,
               facets=1, wprojection_planes=1, parallelism=16):
    npol = 1
    if parallelism == -1:
        parallelism = None
    np.random.seed(seed)
    results['seed'] = seed
    start_all = time.time()
    results['context'] = context
    results['hostname'] = socket.gethostname()
    results['git_hash'] = git_hash()
    results['epoch'] = time.strftime("%Y-%m-%d %H:%M:%S")
    zerow = False
    print("Context is %s" % context)
    results['nworkers'] = nworkers
    results['threads_per_worker'] = threads_per_worker
    results['processes'] = processes
    results['order'] = order
    results['nfreqwin'] = nfreqwin
    results['ntimes'] = ntimes
    results['rmax'] = rmax
    results['facets'] = facets
    results['wprojection_planes'] = wprojection_planes
    print("At start, configuration is {0!r}".format(results))

    conf = SparkConf().setMaster("local[4]")
    sc = SparkContext(conf=conf)
    sc.addFile("./LOWBD2.csv")
    sc.addFile("./sc256")
    sc.addFile("./SKA1_LOW_beam.fits")
    # sc.addFile("./GLEAM_EGC.fits")

    frequency = np.linspace(0.8e8, 1.2e8, nfreqwin)
    if nfreqwin > 1:
        channel_bandwidth = np.array(nfreqwin * [frequency[1] - frequency[0]])
    else:
        channel_bandwidth = np.array([1e6])
    times = np.linspace(-np.pi / 3.0, np.pi / 3.0, ntimes)
    phasecentre = SkyCoord(ra=+30.0 * u.deg, dec=-60.0 * u.deg, frame='icrs', equinox='J2000')

    config = 'LOWBD2'
    polarisation_frame = PolarisationFrame("stokesI")
    # Add broadcast value for telescope_management_data
    telescope_management = telescope_management_handle_locality(sc, config, rmax)
    telescope_management_data = telescope_data_generate_locality(
        telescope_management, times=times, frequencys=frequency,
        channel_bandwidth=channel_bandwidth, weight=1.0, phasecentre=phasecentre,
        polarisation_frame=polarisation_frame, order=order)
    key, meta = next(telescope_management_data)
    print(key)
    print(meta["frequencys"])
    broadcast_tele = sc.broadcast(telescope_management_data)

    vis_graph_list = create_simulate_vis_graph(
        sc, 'LOWBD2', frequency=frequency, channel_bandwidth=channel_bandwidth,
        times=times, phasecentre=phasecentre, order=order, format='blockvis', rmax=rmax)

    print("****** Visibility creation ******")

    wprojection_planes = 1
    vis = None
    for v in vis_graph_list.collect():
        if v[0][2] == 0:
            vis = v[1]
            break

    advice = advise_wide_field(convert_blockvisibility_to_visibility(vis),
                               guard_band_image=6.0, delA=0.02, facets=facets,
                               wprojection_planes=wprojection_planes,
                               oversampling_synthesised_beam=4.0)
    kernel = advice['kernel']
    npixel = advice['npixels2']
    cellsize = advice['cellsize']
    print(cellsize)
    print(npixel)

    if context == 'timeslice' or context == 'facets_timeslice':
        vis_slices = ntimes
    elif context == '2d' or context == 'facets':
        vis_slices = 1
        kernel = '2d'
    else:
        vis_slices = advice['vis_slices']
    # vis_slices = 4
    results['vis_slices'] = vis_slices
    results['cellsize'] = cellsize
    results['npixel'] = npixel
    print(vis_slices)

    gleam_model_graph = create_low_test_image_from_gleam_spark(
        sc=sc, npixel=npixel, frequency=frequency, channel_bandwidth=channel_bandwidth,
        cellsize=cellsize, phasecentre=phasecentre,
        polarisation_frame=PolarisationFrame("stokesI"), flux_limit=0.1, applybeam=False)

    start = time.time()
    print("****** Starting GLEAM model creation ******")
    # gleam_model_graph.cache()
    # gleam_model_graph.collect()
    print("****** Finishing GLEAM model creation *****")
    end = time.time()
    results['time create gleam'] = end - start
    print("Creating GLEAM model took %.2f seconds" % (end - start))

    vis_graph_list = create_predict_graph_first(gleam_model_graph, broadcast_tele,
                                                vis_slices=vis_slices, facets=facets,
                                                context=context, kernel=kernel,
                                                nfrequency=nfreqwin)
    start = time.time()
    print("****** Starting GLEAM model visibility prediction ******")
    # vis_graph_list.cache()
    # vis_graph_list.collect()
    end = time.time()
    results['time predict'] = end - start
    print("GLEAM model Visibility prediction took %.2f seconds" % (end - start))

    # Corrupt the visibility for the GLEAM model
    print("****** Visibility corruption ******")
    vis_graph_list = create_corrupt_vis_graph(vis_graph_list, phase_error=1.0)
    start = time.time()
    vis_graph_list.cache()
    vis_graph_list.collect()
    end = time.time()
    results['time corrupt'] = end - start
    print("Visibility corruption took %.2f seconds" % (end - start))

    # Create an empty model image
    model_graph = create_empty_image(
        vis_graph_list, npixel=npixel, cellsize=cellsize, frequency=frequency,
        channel_bandwidth=channel_bandwidth,
        polarisation_frame=PolarisationFrame("stokesI"))
    model_graph.cache()
    model_graph.collect()

    # psf_graph = create_invert_graph(vis_graph_list, model_graph, vis_slices=vis_slices,
    #                                 context=context, facets=facets, dopsf=True, kernel=kernel)
    #
    # start = time.time()
    # print("****** Starting PSF calculation ******")
    # psfs = psf_graph.collect()
    # psf = None
    # for i in psfs:
    #     if i[0][2] == 0:
    #         psf = i[1][0]
    # end = time.time()
    # results['time psf invert'] = end - start
    # print("PSF invert took %.2f seconds" % (end - start))
    #
    # results['psf_max'] = qa_image(psf).data['max']
    # results['psf_min'] = qa_image(psf).data['min']
    #
    # print(results['psf_max'])
    # print(results['psf_min'])
    #
    # dirty_graph = create_invert_graph(vis_graph_list, model_graph, vis_slices=vis_slices,
    #                                   context=context, facets=facets, kernel=kernel)
    #
    # start = time.time()
    # print("****** Starting dirty image calculation ******")
    # dirtys = dirty_graph.collect()
    # dirty, sumwt = (None, None)
    # for i in dirtys:
    #     if i[0][2] == 0:
    #         dirty, sumwt = i[1]
    #
    # print(psf.shape)
    # print(dirty.shape)
    # end = time.time()
    # results['time invert'] = end - start
    # print("Dirty image invert took %.2f seconds" % (end - start))
    # print("Maximum in dirty image is ", numpy.max(numpy.abs(dirty.data)), ", sumwt is ", sumwt)
    # qa = qa_image(dirty)
    # results['dirty_max'] = qa.data['max']
    # results['dirty_min'] = qa.data['min']
    #
    # start = time.time()
    # print("***** write data to file *****")
    # export_images_to_fits(psfs, nfreqwin, "psf.fits")
    # export_images_to_fits(dirtys, nfreqwin, "dirty.fits")
    # end = time.time()
    # results['time write'] = end - start

    print("****** Starting ICAL ******" + " parallelism = " + str(parallelism))
    start = time.time()
    residual_graph, deconvolve_graph, restore_graph = create_ical_graph_locality(
        sc, vis_graph_list, model_graph, nchan=nfreqwin, context=context,
        vis_slices=vis_slices, facets=facets, first_selfcal=1, algorithm='msclean',
        nmoments=3, niter=1000, fractional_threshold=0.1, scales=[0, 3, 10],
        threshold=0.1, nmajor=5, gain=0.7, timeslice='auto', global_solution=True,
        window_shape='quarter', parallelism=parallelism)
    deconvolveds = deconvolve_graph.collect()
    residuals = residual_graph.collect()
    restores = restore_graph.collect()
    end = time.time()
    results['time ICAL'] = end - start
    print("ICAL graph execution took %.2f seconds" % (end - start))

    residual = None
    for i in residuals:
        if i[0][2] == 0:
            residual = i[1][0]
    print(residual)
    qa = qa_image(residual)
    results['residual_max'] = qa.data['max']
    results['residual_min'] = qa.data['min']
    export_images_to_fits(residuals, nfreqwin, "pipelines-timings-delayed-ical_residual.fits")

    deconvolve = None
    for i in deconvolveds:
        if i[0][2] == 0:
            deconvolve = i[1]
    print(deconvolve)
    qa = qa_image(deconvolve)
    results['deconvolved_max'] = qa.data['max']
    results['deconvolved_min'] = qa.data['min']
    export_images_to_fits(deconvolveds, nfreqwin, "pipelines-timings-delayed-deconvolved.fits",
                          has_sumwt=False)

    restore = None
    for i in restores:
        if i[0][2] == 0:
            restore = i[1]
    print(restore)
    qa = qa_image(restore)
    results['restored_max'] = qa.data['max']
    results['restored_min'] = qa.data['min']
    export_images_to_fits(restores, nfreqwin, "pipelines-timings-delayed-restored.fits",
                          has_sumwt=False)

    end_all = time.time()
    results['time overall'] = end_all - start_all
    print("At end, results are {0!r}".format(results))
    sc.stop()
    return results
def trial_case(results, seed=180555, context='wstack', nworkers=8, threads_per_worker=1,
               processes=True, order='frequency', nfreqwin=7, ntimes=3, rmax=750.0,
               facets=1, wprojection_planes=1):
    """Single trial for performance timings.

    Simulates visibilities from GLEAM including phase errors, makes the dirty image and
    PSF, and runs the ICAL pipeline.

    The results are in a dictionary:

    'context': input - a string describing concisely the purpose of the test
    'time overall': overall execution time (s)
    'time create gleam': time to create GLEAM prediction graph
    'time predict': time to execute GLEAM prediction graph
    'time corrupt': time to corrupt data
    'time invert': time to make dirty image
    'time psf invert': time to make PSF
    'time ICAL graph': time to create ICAL graph
    'time ICAL': time to execute ICAL graph
    'context': type of imaging e.g. 'wstack'
    'nworkers': number of workers to create
    'threads_per_worker': threads per worker
    'nnodes': number of nodes
    'processes': whether processes were used
    'order': ordering of data
    'nfreqwin': number of frequency windows in simulation
    'ntimes': number of hour angles in simulation
    'rmax': maximum radius of stations used in simulation (m)
    'facets': number of facets in deconvolution and imaging
    'wprojection_planes': number of w-projection planes
    'vis_slices': number of visibility slices (per Visibility)
    'npixel': number of pixels in image
    'cellsize': cellsize in radians
    'seed': random number seed
    'dirty_max': maximum in dirty image
    'dirty_min': minimum in dirty image
    'psf_max', 'psf_min', 'restored_max', 'restored_min',
    'deconvolved_max', 'deconvolved_min', 'residual_max', 'residual_min': image statistics
    'git_hash': git hash (not definitive since local mods are possible)

    :param results: Initial state
    :param seed: Random number seed (used in gain simulations)
    :param context: Type of imaging context: '2d'|'timeslice'|'wstack'
    :param nworkers: Number of dask workers to use
    :param threads_per_worker: Number of threads per worker
    :param processes: Use processes instead of threads 'processes'|'threads'
    :param order: See create_simulate_vis_graph
    :param nfreqwin: See create_simulate_vis_graph
    :param ntimes: See create_simulate_vis_graph
    :param rmax: See create_simulate_vis_graph
    :param facets: Number of facets to use
    :param wprojection_planes: Number of w-projection planes to use
    :return: results dictionary
    """
    def check_workers(client, nworkers_initial):
        nworkers_final = len(client.scheduler_info()['workers'])
        assert nworkers_final == nworkers_initial, \
            "Started %d workers, only %d at end" % (nworkers_initial, nworkers_final)

    numpy.random.seed(seed)
    results['seed'] = seed
    start_all = time.time()
    results['context'] = context
    results['hostname'] = socket.gethostname()
    results['git_hash'] = git_hash()
    results['epoch'] = time.strftime("%Y-%m-%d %H:%M:%S")
    zerow = False
    print("Context is %s" % context)
    results['nworkers'] = nworkers
    results['threads_per_worker'] = threads_per_worker
    results['processes'] = processes
    results['order'] = order
    results['nfreqwin'] = nfreqwin
    results['ntimes'] = ntimes
    results['rmax'] = rmax
    results['facets'] = facets
    results['wprojection_planes'] = wprojection_planes
    print("At start, configuration is {0!r}".format(results))

    # Parameters determining scale
    frequency = numpy.linspace(0.8e8, 1.2e8, nfreqwin)
    if nfreqwin > 1:
        channel_bandwidth = numpy.array(nfreqwin * [frequency[1] - frequency[0]])
    else:
        channel_bandwidth = numpy.array([1e6])
    times = numpy.linspace(-numpy.pi / 3.0, numpy.pi / 3.0, ntimes)
    phasecentre = SkyCoord(ra=+30.0 * u.deg, dec=-60.0 * u.deg, frame='icrs', equinox='J2000')

    vis_graph_list = create_simulate_vis_graph(
        'LOWBD2', frequency=frequency, channel_bandwidth=channel_bandwidth, times=times,
        phasecentre=phasecentre, order=order, format='blockvis', rmax=rmax)

    client = get_dask_Client(n_workers=nworkers, threads_per_worker=threads_per_worker,
                             processes=processes)
    nworkers_initial = len(client.scheduler_info()['workers'])
    check_workers(client, nworkers_initial)
    results['nnodes'] = len(numpy.unique(findNodes(client)))
    print("Defined %d workers on %d nodes" % (nworkers, results['nnodes']))

    print("****** Visibility creation ******")
    vis_graph_list = compute_list(client, vis_graph_list)
    print("After creating vis_graph_list", client)

    # Find the best imaging parameters.
    wprojection_planes = 1
    advice = advise_wide_field(convert_blockvisibility_to_visibility(vis_graph_list[0]),
                               guard_band_image=6.0, delA=0.02, facets=facets,
                               wprojection_planes=wprojection_planes,
                               oversampling_synthesised_beam=4.0)
    kernel = advice['kernel']
    npixel = advice['npixels2']
    cellsize = advice['cellsize']

    if context == 'timeslice':
        vis_slices = ntimes
    elif context == '2d':
        vis_slices = 1
        kernel = '2d'
    else:
        vis_slices = advice['vis_slices']
    results['vis_slices'] = vis_slices
    results['cellsize'] = cellsize
    results['npixel'] = npixel

    gleam_model_graph = [
        delayed(create_low_test_image_from_gleam)(
            npixel=npixel, frequency=[frequency[f]], channel_bandwidth=[channel_bandwidth[f]],
            cellsize=cellsize, phasecentre=phasecentre,
            polarisation_frame=PolarisationFrame("stokesI"), flux_limit=0.1, applybeam=True)
        for f, freq in enumerate(frequency)
    ]

    start = time.time()
    print("****** Starting GLEAM model creation ******")
    gleam_model_graph = compute_list(client, gleam_model_graph)
    cmodel = smooth_image(gleam_model_graph[0])
    export_image_to_fits(cmodel, "pipelines-timings-delayed-gleam_cmodel.fits")
    end = time.time()
    results['time create gleam'] = end - start
    print("Creating GLEAM model took %.2f seconds" % (end - start))

    vis_graph_list = create_predict_graph(vis_graph_list, gleam_model_graph, vis_slices=51,
                                          context=context, kernel=kernel)
    start = time.time()
    print("****** Starting GLEAM model visibility prediction ******")
    vis_graph_list = compute_list(client, vis_graph_list)
    end = time.time()
    results['time predict'] = end - start
    print("After prediction", client)
    print("GLEAM model Visibility prediction took %.2f seconds" % (end - start))

    # Corrupt the visibility for the GLEAM model
    print("****** Visibility corruption ******")
    vis_graph_list = create_corrupt_vis_graph(vis_graph_list, phase_error=1.0)
    start = time.time()
    vis_graph_list = compute_list(client, vis_graph_list)
    end = time.time()
    results['time corrupt'] = end - start
    print("After corrupt", client)
    print("Visibility corruption took %.2f seconds" % (end - start))

    # Create an empty model image
    model_graph = [
        delayed(create_image_from_visibility)(
            vis_graph_list[f], npixel=npixel, cellsize=cellsize, frequency=[frequency[f]],
            channel_bandwidth=[channel_bandwidth[f]],
            polarisation_frame=PolarisationFrame("stokesI"))
        for f, freq in enumerate(frequency)
    ]
    model_graph = client.compute(model_graph, sync=True)

    psf_graph = create_invert_graph(vis_graph_list, model_graph, vis_slices=vis_slices,
                                    context=context, facets=facets, dopsf=True, kernel=kernel)
    start = time.time()
    print("****** Starting PSF calculation ******")
    psf, sumwt = client.compute(psf_graph, sync=True)[0]
    check_workers(client, nworkers_initial)
    end = time.time()
    results['time psf invert'] = end - start
    print("PSF invert took %.2f seconds" % (end - start))
    print("After psf", client)

    results['psf_max'] = qa_image(psf).data['max']
    results['psf_min'] = qa_image(psf).data['min']

    dirty_graph = create_invert_graph(vis_graph_list, model_graph, vis_slices=vis_slices,
                                      context=context, facets=facets, kernel=kernel)
    start = time.time()
    print("****** Starting dirty image calculation ******")
    dirty, sumwt = client.compute(dirty_graph, sync=True)[0]
    check_workers(client, nworkers_initial)
    end = time.time()
    print("After dirty image", client)
    results['time invert'] = end - start
    print("Dirty image invert took %.2f seconds" % (end - start))
    print("Maximum in dirty image is ", numpy.max(numpy.abs(dirty.data)), ", sumwt is ", sumwt)
    qa = qa_image(dirty)
    results['dirty_max'] = qa.data['max']
    results['dirty_min'] = qa.data['min']

    # Create the ICAL pipeline to run 5 major cycles, starting selfcal at cycle 1.
    # A global solution across all frequencies (i.e. Visibilities) is performed.
    print("****** Starting ICAL ******")
    start = time.time()
    ical_graph = create_ical_pipeline_graph(
        vis_graph_list, model_graph=model_graph, context=context, do_selfcal=1,
        nchan=nfreqwin, vis_slices=vis_slices, algorithm='mmclean', nmoments=3, niter=1000,
        fractional_threshold=0.1, scales=[0, 3, 10], threshold=0.1, nmajor=5, gain=0.7,
        timeslice='auto', global_solution=True, window_shape='quarter')
    end = time.time()
    results['time ICAL graph'] = end - start
    print("Construction of ICAL graph took %.2f seconds" % (end - start))

    # Execute the graph
    start = time.time()
    result = client.compute(ical_graph, sync=True)
    deconvolved, residual, restored = result
    check_workers(client, nworkers_initial)
    end = time.time()
    print("After ICAL", client)
    results['time ICAL'] = end - start
    print("ICAL graph execution took %.2f seconds" % (end - start))

    qa = qa_image(deconvolved[0])
    results['deconvolved_max'] = qa.data['max']
    results['deconvolved_min'] = qa.data['min']
    export_image_to_fits(deconvolved[0], "pipelines-timings-delayed-ical_deconvolved.fits")

    qa = qa_image(residual[0][0])
    results['residual_max'] = qa.data['max']
    results['residual_min'] = qa.data['min']
    export_image_to_fits(residual[0][0], "pipelines-timings-delayed-ical_residual.fits")

    qa = qa_image(restored[0])
    results['restored_max'] = qa.data['max']
    results['restored_min'] = qa.data['min']
    export_image_to_fits(restored[0], "pipelines-timings-delayed-ical_restored.fits")

    # client.shutdown()
    end_all = time.time()
    results['time overall'] = end_all - start_all
    print("At end, results are {0!r}".format(results))
    return results
ntimes = 11
rmax = 300.0
frequency = numpy.linspace(0.8e8, 1.2e8, nfreqwin)
channel_bandwidth = numpy.array(nfreqwin * [frequency[1] - frequency[0]])
times = numpy.linspace(-numpy.pi / 3.0, numpy.pi / 3.0, ntimes)
phasecentre = SkyCoord(ra=+30.0 * u.deg, dec=-60.0 * u.deg, frame='icrs', equinox='J2000')

lowcore = create_named_configuration('LOWBD2-CORE', rmax=rmax)
block_vis = create_blockvisibility(lowcore, times, frequency=frequency,
                                   channel_bandwidth=channel_bandwidth, weight=1.0,
                                   phasecentre=phasecentre,
                                   polarisation_frame=PolarisationFrame("stokesI"))

wprojection_planes = 1
advice = advise_wide_field(block_vis, guard_band_image=4.0, delA=0.02,
                           wprojection_planes=wprojection_planes)
vis_slices = advice['vis_slices']
npixel = advice['npixels2']
cellsize = advice['cellsize']

gleam_model = create_low_test_image_from_gleam(npixel=npixel, frequency=frequency,
                                               channel_bandwidth=channel_bandwidth,
                                               cellsize=cellsize, phasecentre=phasecentre,
                                               flux_limit=1.0, applybeam=True)

predicted_vis = predict_function(block_vis, gleam_model, vis_slices=51, context='wstack')
# print("np.sum(predicted_vis.data): ", numpy.sum(predicted_vis.data['vis']))
block_vis = convert_visibility_to_blockvisibility(predicted_vis)
# print("np.sum(block_vis.data): ", numpy.sum(block_vis.data['vis']))
block_vis = create_blockvisibility(
    lowcore, times, frequency=frequency, channel_bandwidth=channel_bandwidth, weight=1.0,
    phasecentre=phasecentre, polarisation_frame=PolarisationFrame("stokesI"))

# In[3]:

wprojection_planes = 1
facets = 4
advice = advise_wide_field(block_vis, guard_band_image=6.0, delA=0.02, facets=facets,
                           wprojection_planes=wprojection_planes,
                           oversampling_synthesised_beam=4.0)
vis_slices = advice['vis_slices']
npixel = advice['npixels2']
cellsize = advice['cellsize']

# In[4]:

gleam_model = create_low_test_image_from_gleam(
    npixel=npixel, frequency=frequency, channel_bandwidth=channel_bandwidth,
    cellsize=cellsize, phasecentre=phasecentre, applybeam=True,
def trial_case(results, seed=180555, context='wstack', nworkers=8, threads_per_worker=1,
               processes=True, order='frequency', nfreqwin=7, ntimes=3, rmax=750.0,
               facets=1, wprojection_planes=1, parallelism=16):
    npol = 1
    if parallelism == -1:
        parallelism = None
    np.random.seed(seed)
    results['seed'] = seed
    start_all = time.time()
    results['context'] = context
    results['hostname'] = socket.gethostname()
    results['git_hash'] = git_hash()
    results['epoch'] = time.strftime("%Y-%m-%d %H:%M:%S")
    zerow = False
    print("Context is %s" % context)
    results['nworkers'] = nworkers
    results['threads_per_worker'] = threads_per_worker
    results['processes'] = processes
    results['order'] = order
    results['nfreqwin'] = nfreqwin
    results['ntimes'] = ntimes
    results['rmax'] = rmax
    results['facets'] = facets
    results['wprojection_planes'] = wprojection_planes
    print("At start, configuration is {0!r}".format(results))

    conf = SparkConf().setMaster("local[16]")
    sc = SparkContext(conf=conf)
    sc.addFile("./LOWBD2.csv")
    sc.addFile("./sc128")
    sc.addFile("./SKA1_LOW_beam.fits")
    # sc.addFile("./GLEAM_EGC.fits")

    frequency = np.linspace(0.8e8, 1.2e8, nfreqwin)
    if nfreqwin > 1:
        channel_bandwidth = np.array(nfreqwin * [frequency[1] - frequency[0]])
    else:
        channel_bandwidth = np.array([1e6])
    times = np.linspace(-np.pi / 3.0, np.pi / 3.0, ntimes)
    phasecentre = SkyCoord(ra=+30.0 * u.deg, dec=-60.0 * u.deg, frame='icrs', equinox='J2000')

    # vis_graph_list = create_simulate_vis_graph(sc, 'LOWBD2', frequency=frequency,
    #                                            channel_bandwidth=channel_bandwidth,
    #                                            times=times, phasecentre=phasecentre,
    #                                            order=order, format='blockvis', rmax=rmax)
    config = 'LOWBD2'
    polarisation_frame = PolarisationFrame("stokesI")
    telescope_management = telescope_management_handle(sc, config, rmax)
    telescope_management_data = telescope_data_generate(
        telescope_management, times=times, frequencys=frequency,
        channel_bandwidth=channel_bandwidth, weight=1.0, phasecentre=phasecentre,
        polarisation_frame=polarisation_frame, order=order)
    key, meta = next(telescope_management_data)
    print(key)
    print(meta["frequencys"])
    broadcast_tele = sc.broadcast(telescope_management_data)

    # Only create one visibility, to get the advice
    print("****** Visibility creation ******")
    times_meta = meta["times"]
    frequencys_meta = meta["frequencys"]
    channel_bandwidth_meta = meta["channel_bandwidth"]
    phasecentre_meta = meta["phasecentre"]
    polarisation_frame_meta = meta["polarisation_frame"]
    weight_meta = meta["weight"]
    conf_meta = meta["conf"]
    vis = create_blockvisibility(conf_meta, times=times_meta, frequency=frequencys_meta,
                                 channel_bandwidth=channel_bandwidth_meta, weight=weight_meta,
                                 phasecentre=phasecentre_meta,
                                 polarisation_frame=polarisation_frame_meta)

    # wprojection_planes = 1
    # vis = None
    # for v in vis_graph_list.collect():
    #     if v[0][2] == 0:
    #         vis = v[1]
    #         break
    advice = advise_wide_field(convert_blockvisibility_to_visibility(vis),
                               guard_band_image=6.0, delA=0.02, facets=facets,
                               wprojection_planes=wprojection_planes,
                               oversampling_synthesised_beam=4.0)
    kernel = advice['kernel']
    npixel = advice['npixels2']
    cellsize = advice['cellsize']
    print(cellsize)
    print(npixel)

    if context == 'timeslice' or context == 'facets_timeslice':
        vis_slices = ntimes
    elif context == '2d' or context == 'facets':
        vis_slices = 1
        kernel = '2d'
    else:
        vis_slices = advice['vis_slices']
    # vis_slices = 4
    results['vis_slices'] = vis_slices
    results['cellsize'] = cellsize
    results['npixel'] = npixel
    print(vis_slices)

    gleam_model_graph = create_low_test_image_from_gleam_spark(
        sc=sc, npixel=npixel, frequency=frequency, channel_bandwidth=channel_bandwidth,
        cellsize=cellsize, phasecentre=phasecentre,
        polarisation_frame=PolarisationFrame("stokesI"), flux_limit=0.1, applybeam=False)

    start = time.time()
    print("****** Starting GLEAM model creation ******")
    # gleam_model_graph.cache()
    # gleam_model_graph.collect()
    print("****** Finishing GLEAM model creation *****")
    end = time.time()
    results['time create gleam'] = end - start
    print("Creating GLEAM model took %.2f seconds" % (end - start))

    vis_graph_list = create_predict_graph_first(gleam_model_graph, broadcast_tele,
                                                vis_slices=vis_slices, facets=facets,
                                                context=context, kernel=kernel,
                                                nfrequency=nfreqwin)
    start = time.time()
    print("****** Starting GLEAM model visibility prediction ******")
    # vis_graph_list.cache()
    # vis_graph_list.collect()
    viscount = vis_graph_list.count()
    print("Got visibility of predict %3d" % viscount)
    end = time.time()
    results['time predict'] = end - start
    print("GLEAM model Visibility prediction took %.2f seconds" % (end - start))

    # Corrupt the visibility for the GLEAM model
    print("****** Visibility corruption ******")
    vis_graph_list = create_corrupt_vis_graph(vis_graph_list, phase_error=1.0)
    viscount2 = vis_graph_list.count()
    print("Got visibility of corrupt %3d" % viscount2)
    start = time.time()
    vis_graph_list.cache()
    # vis_graph_list.collect()
    end = time.time()
    results['time corrupt'] = end - start
    print("Visibility corruption took %.2f seconds" % (end - start))

    # Create an empty model image
    model_graph = create_empty_image(vis_graph_list, npixel=npixel, cellsize=cellsize,
                                     frequency=frequency, channel_bandwidth=channel_bandwidth,
                                     polarisation_frame=PolarisationFrame("stokesI"))
    model_graph.cache()
    print("Got model graph %3d" % model_graph.count())

    psf_graph = create_invert_graph(vis_graph_list, model_graph, vis_slices=vis_slices,
                                    context=context, facets=facets, dopsf=True, kernel=kernel)
    start = time.time()
    print("****** Starting PSF calculation ******")
    psfs = psf_graph.collect()
    psf = None
    for i in psfs:
        if i[0][2] == 0:
            psf = i[1][0]
    end = time.time()
    results['time psf invert'] = end - start
    print("PSF invert took %.2f seconds" % (end - start))

    results['psf_max'] = qa_image(psf).data['max']
    results['psf_min'] = qa_image(psf).data['min']
    print(results['psf_max'])
    print(results['psf_min'])

    dirty_graph = create_invert_graph(vis_graph_list, model_graph, vis_slices=vis_slices,
                                      context=context, facets=facets, kernel=kernel)
    start = time.time()
    print("****** Starting dirty image calculation ******")
    dirtys = dirty_graph.collect()
    dirty, sumwt = (None, None)
    for i in dirtys:
        if i[0][2] == 0:
            dirty, sumwt = i[1]
    print(psf.shape)
    print(dirty.shape)
    end = time.time()
    results['time invert'] = end - start
    print("Dirty image invert took %.2f seconds" % (end - start))
    print("Maximum in dirty image is ", numpy.max(numpy.abs(dirty.data)), ", sumwt is ", sumwt)
    qa = qa_image(dirty)
    results['dirty_max'] = qa.data['max']
    results['dirty_min'] = qa.data['min']

    start = time.time()
    print("***** write data to file *****")
    export_images_to_fits(psfs, nfreqwin, "psf.fits")
    export_images_to_fits(dirtys, nfreqwin, "dirty.fits")
    show_image(psf.data[0, 0, ...], 'hello_psf', 1)
    show_image(dirty.data[0, 0, ...], 'hello_dirty', 1)
    end = time.time()
    results['time write'] = end - start

    end_all = time.time()
    results['time overall'] = end_all - start_all
    print("At end, results are {0!r}".format(results))
    sc.stop()
    return results