    controls = create_calibration_controls()
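    # Calibration controls for the three Jones terms: 'first_selfcal' is the
    # first major cycle at which each term (T, G, B) is solved for, and
    # 'timescale' is its solution interval ('auto' or a value in seconds,
    # assuming the usual ARL convention).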

    controls['T']['first_selfcal'] = 1
    controls['G']['first_selfcal'] = 3
    controls['B']['first_selfcal'] = 4

    controls['T']['timescale'] = 'auto'
    controls['G']['timescale'] = 'auto'
    controls['B']['timescale'] = 1e5

    pp.pprint(controls)

    # In[ ]:

    future_vislist = arlexecute.scatter(vislist)
    ntimes = len(vislist[0].time)
    ical_list = ical_arlexecute(future_vislist,
                                model_imagelist=model_list,
                                context='wstack',
                                calibration_context='TG',
                                controls=controls,
                                scales=[0, 3, 10],
                                algorithm='mmclean',
                                nmoment=3,
                                niter=1000,
                                fractional_threshold=0.1,
                                threshold=0.1,
                                nmajor=5,
                                gain=0.25,
                                deconvolve_facets=8,
                                # the closing arguments below mirror the full
                                # ical_arlexecute call used later in these examples
                                deconvolve_overlap=16,
                                deconvolve_taper='tukey',
                                vis_slices=ntimes,
                                timeslice='auto',
                                global_solution=False,
                                do_selfcal=True,
                                psf_support=64)
Example #2
    gleam_model = [
        arlexecute.execute(create_low_test_image_from_gleam)(
            npixel=npixel,
            frequency=[frequency[f]],
            channel_bandwidth=[channel_bandwidth[f]],
            cellsize=cellsize,
            phasecentre=phasecentre,
            polarisation_frame=PolarisationFrame("stokesI"),
            flux_limit=1.0,
            applybeam=True) for f, freq in enumerate(frequency)
    ]
    log.info('About to make GLEAM model')
    gleam_model = arlexecute.compute(gleam_model, sync=True)
    gleam_skymodel = SkyModel(images=gleam_model)
    export_skymodel_to_hdf5(gleam_skymodel, 'gleam_simulation_skymodel.hdf')
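    # Scattering the computed images to the Dask workers lets later graph
    # stages reference futures instead of re-shipping the image data.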
    future_gleam_model = arlexecute.scatter(gleam_model)

    # In[ ]:

    log.info('About to run predict to get predicted visibility')
    future_vis_graph = arlexecute.scatter(vis_list)
    predicted_vislist = predict_workflow(future_vis_graph,
                                         gleam_model,
                                         context='wstack',
                                         vis_slices=vis_slices)
    corrupted_vislist = corrupt_workflow(predicted_vislist, phase_error=1.0)
    log.info('About to run corrupt to get corrupted visibility')
    corrupted_vislist = arlexecute.compute(corrupted_vislist, sync=True)

    export_blockvisibility_to_hdf5(corrupted_vislist,
                                   'gleam_simulation_vislist.hdf')
def main():
    """Run the workflow."""
    init_logging()

    LOG.info("Starting imaging-pipeline")

    # Read parameters
    PARFILE = 'parameters.json'
    if len(sys.argv) > 1:
        PARFILE = sys.argv[1]
    LOG.info("JSON parameter file = %s", PARFILE)
    try:
        with open(PARFILE, "r") as par_file:
            jspar = json.load(par_file)
    except (OSError, json.JSONDecodeError) as error:
        LOG.critical('ERROR %s', error)
        return

    # We will use dask
    arlexecute.set_client(get_dask_Client())
    arlexecute.run(init_logging)

    # Import visibility list from HDF5 file
    vis_list = import_blockvisibility_from_hdf5(
        '%s/%s' % (RESULTS_DIR, jspar["files"]["vis_list"]))

    # Now read the BlockVisibilities constructed using a model drawn from GLEAM
    predicted_vislist = import_blockvisibility_from_hdf5(
        '%s/%s' % (RESULTS_DIR, jspar["files"]["predicted_vis_list"]))
    corrupted_vislist = import_blockvisibility_from_hdf5(
        '%s/%s' % (RESULTS_DIR, jspar["files"]["corrupted_vis_list"]))

    # Reproduce parameters from the visibility data
    ntimes = vis_list[0].nvis
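    # ntimes is taken from the visibility count of the first BlockVisibility
    # and is reused below as vis_slices for the ICAL graph.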

    phasecentre = vis_list[0].phasecentre
    print(phasecentre)
    polframe = vis_list[0].polarisation_frame.type
    LOG.info("Polarisation Frame of vis_list: %s", polframe)

    wprojection_planes = jspar["advice"]["wprojection_planes"]
    guard_band_image   = jspar["advice"]["guard_band_image"]
    delA               = jspar["advice"]["delA"]
    advice_low = advise_wide_field(vis_list[0], guard_band_image=guard_band_image,
                                   delA=delA,
                                   wprojection_planes=wprojection_planes)
    advice_high = advise_wide_field(vis_list[-1], guard_band_image=guard_band_image,
                                    delA=delA,
                                    wprojection_planes=wprojection_planes)

    vis_slices = advice_low['vis_slices']
    npixel = advice_high['npixels2']
    cellsize = min(advice_low['cellsize'], advice_high['cellsize'])
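    # advise_wide_field recommends imaging parameters: the number of w-slices
    # comes from the lowest-frequency advice, the image size from the highest,
    # and the cell size is the finer of the two recommendations.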
    
    # Recovering frequencies
    fstart = vis_list[0].frequency
    fend = vis_list[-1].frequency
    num_freq_win = len(vis_list)
    frequency = numpy.linspace(fstart, fend, num_freq_win)

    # Recovering bandwidths
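    # The channel bandwidth is reconstructed assuming evenly spaced frequency
    # windows, from the spacing between the first two BlockVisibilities.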
    channel_bandwidth = numpy.array(num_freq_win * [vis_list[1].frequency - vis_list[0].frequency])
    
    # Get the LSM. This is currently blank.
    model_list = [
        arlexecute.execute(create_image_from_visibility)(
            vis_list[f],
            npixel=npixel,
            frequency=[frequency[f]],
            channel_bandwidth=[channel_bandwidth[f]],
            cellsize=cellsize,
            phasecentre=phasecentre,
            polarisation_frame=PolarisationFrame(polframe))
        for f, freq in enumerate(frequency)
    ]
    # future_predicted_vislist = arlexecute.scatter(predicted_vislist)

    # Create and execute graphs to make the dirty image and PSF
    # LOG.info('About to run invert to get dirty image')
    # dirty_list = invert_component(future_predicted_vislist, model_list,
    #                               context='wstack',
    #                               vis_slices=vis_slices, dopsf=False)
    # dirty_list = arlexecute.compute(dirty_list, sync=True)

    # LOG.info('About to run invert to get PSF')
    # psf_list = invert_component(future_predicted_vislist, model_list,
    #                             context='wstack',
    #                             vis_slices=vis_slices, dopsf=True)
    # psf_list = arlexecute.compute(psf_list, sync=True)

    # Now deconvolve using msclean
    # LOG.info('About to run deconvolve')
    # deconvolve_list, _ = deconvolve_component(
    #     dirty_list, psf_list,
    #     model_imagelist=model_list,
    #     deconvolve_facets=8,
    #     deconvolve_overlap=16,
    #     deconvolve_taper='tukey',
    #     scales=[0, 3, 10],
    #     algorithm='msclean',
    #     niter=1000,
    #     fractional_threshold=0.1,
    #     threshold=0.1,
    #     gain=0.1,
    #     psf_support=64)
    # deconvolved = arlexecute.compute(deconvolve_list, sync=True)

    LOG.info('About to run continuum imaging')
    continuum_imaging_list = continuum_imaging_arlexecute(
        predicted_vislist,
        model_imagelist=model_list,
        context=jspar["processing"]["continuum_imaging"]["context"],  # 'wstack'
        vis_slices=vis_slices,
        scales=jspar["processing"]["continuum_imaging"]["scales"],  # [0, 3, 10]
        algorithm=jspar["processing"]["continuum_imaging"]["algorithm"],  # 'mmclean'
        nmoment=jspar["processing"]["continuum_imaging"]["nmoment"],  # 3
        niter=jspar["processing"]["continuum_imaging"]["niter"],  # 1000
        fractional_threshold=jspar["processing"]["continuum_imaging"]["fractional_threshold"],  # 0.1
        threshold=jspar["processing"]["continuum_imaging"]["threshold"],  # 0.1
        nmajor=jspar["processing"]["continuum_imaging"]["nmajor"],  # 5
        gain=jspar["processing"]["continuum_imaging"]["gain"],  # 0.25
        deconvolve_facets=jspar["processing"]["continuum_imaging"]["deconvolve_facets"],  # 8
        deconvolve_overlap=jspar["processing"]["continuum_imaging"]["deconvolve_overlap"],  # 16
        deconvolve_taper=jspar["processing"]["continuum_imaging"]["deconvolve_taper"],  # 'tukey'
        psf_support=jspar["processing"]["continuum_imaging"]["psf_support"])  # 64
    result = arlexecute.compute(continuum_imaging_list, sync=True)
    deconvolved = result[0][0]
    residual = result[1][0]
    restored = result[2][0]
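    # The pipeline returns (deconvolved, residual, restored) image lists; the
    # trailing [0] picks the first frequency window of each product. The
    # residual entry is indexed again below, consistent with it being an
    # (image, weight) pair from the invert step.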

    print(qa_image(deconvolved, context='Clean image - no selfcal'))
    print(qa_image(restored, context='Restored clean image - no selfcal'))
    export_image_to_fits(restored,
                         '%s/%s' % (RESULTS_DIR, jspar["files"]["continuum_imaging_restored"]))

    print(qa_image(residual[0], context='Residual clean image - no selfcal'))
    export_image_to_fits(residual[0],
                         '%s/%s' % (RESULTS_DIR, jspar["files"]["continuum_imaging_residual"]))

    controls = create_calibration_controls()

    controls['T']['first_selfcal'] = jspar["processing"]["controls"]["T"]["first_selfcal"]
    controls['G']['first_selfcal'] = jspar["processing"]["controls"]["G"]["first_selfcal"]
    controls['B']['first_selfcal'] = jspar["processing"]["controls"]["B"]["first_selfcal"]

    controls['T']['timescale'] = jspar["processing"]["controls"]["T"]["timescale"]
    controls['G']['timescale'] = jspar["processing"]["controls"]["G"]["timescale"]
    controls['B']['timescale'] = jspar["processing"]["controls"]["B"]["timescale"]

    PP.pprint(controls)

    future_corrupted_vislist = arlexecute.scatter(corrupted_vislist)
    ical_list = ical_arlexecute(
        future_corrupted_vislist,
        model_imagelist=model_list,
        context=jspar["processing"]["ical"]["context"],  # 'wstack'
        calibration_context=jspar["processing"]["ical"]["calibration_context"],  # 'TG'
        controls=controls,
        vis_slices=ntimes,
        scales=jspar["processing"]["ical"]["scales"],  # [0, 3, 10]
        timeslice=jspar["processing"]["ical"]["timeslice"],  # 'auto'
        algorithm=jspar["processing"]["ical"]["algorithm"],  # 'mmclean'
        nmoment=jspar["processing"]["ical"]["nmoment"],  # 3
        niter=jspar["processing"]["ical"]["niter"],  # 1000
        fractional_threshold=jspar["processing"]["ical"]["fractional_threshold"],  # 0.1
        threshold=jspar["processing"]["ical"]["threshold"],  # 0.1
        nmajor=jspar["processing"]["ical"]["nmajor"],  # 5
        gain=jspar["processing"]["ical"]["gain"],  # 0.25
        deconvolve_facets=jspar["processing"]["ical"]["deconvolve_facets"],  # 8
        deconvolve_overlap=jspar["processing"]["ical"]["deconvolve_overlap"],  # 16
        deconvolve_taper=jspar["processing"]["ical"]["deconvolve_taper"],  # 'tukey'
        global_solution=jspar["processing"]["ical"]["global_solution"],  # False
        do_selfcal=jspar["processing"]["ical"]["do_selfcal"],  # True
        psf_support=jspar["processing"]["ical"]["psf_support"])  # 64
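    # Unlike the continuum-imaging run above, ICAL interleaves self-calibration
    # (driven by the T/G/B controls set up earlier) with the imaging major cycles.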


    LOG.info('About to run ical')
    result = arlexecute.compute(ical_list, sync=True)
    deconvolved = result[0][0]
    residual = result[1][0]
    restored = result[2][0]

    print(qa_image(deconvolved, context='Clean image'))
    print(qa_image(restored, context='Restored clean image'))
    export_image_to_fits(restored, '%s/%s' % (RESULTS_DIR, jspar["files"]["ical_restored"]))

    print(qa_image(residual[0], context='Residual clean image'))
    export_image_to_fits(residual[0], '%s/%s' % (RESULTS_DIR, jspar["files"]["ical_residual"]))

    arlexecute.close()
Example #4
    def actualSetUp(self,
                    add_errors=False,
                    freqwin=5,
                    block=False,
                    dospectral=True,
                    dopol=False,
                    amp_errors=None,
                    phase_errors=None,
                    zerow=True):

        if amp_errors is None:
            amp_errors = {'T': 0.0, 'G': 0.01, 'B': 0.01}
        if phase_errors is None:
            phase_errors = {'T': 1.0, 'G': 0.1, 'B': 0.01}

        self.npixel = 512
        self.low = create_named_configuration('LOWBD2', rmax=750.0)
        self.freqwin = freqwin
        self.vis_list = list()
        self.ntimes = 5
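        # Times spanning -3 h to +3 h expressed in radians (pi/12 rad per hour),
        # presumably hour angles as used elsewhere in these tests.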
        self.times = numpy.linspace(-3.0, +3.0, self.ntimes) * numpy.pi / 12.0
        self.frequency = numpy.linspace(0.8e8, 1.2e8, self.freqwin)

        if freqwin > 1:
            self.channelwidth = numpy.array(
                freqwin * [self.frequency[1] - self.frequency[0]])
        else:
            self.channelwidth = numpy.array([1e6])

        if dopol:
            self.vis_pol = PolarisationFrame('linear')
            self.image_pol = PolarisationFrame('stokesIQUV')
            f = numpy.array([100.0, 20.0, -10.0, 1.0])
        else:
            self.vis_pol = PolarisationFrame('stokesI')
            self.image_pol = PolarisationFrame('stokesI')
            f = numpy.array([100.0])

        if dospectral:
            flux = numpy.array(
                [f * numpy.power(freq / 1e8, -0.7) for freq in self.frequency])
        else:
            flux = numpy.array([f])
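        # With dospectral, each component flux is scaled by (freq / 1e8) ** -0.7,
        # i.e. a steep spectral index about the 100 MHz reference frequency.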

        self.phasecentre = SkyCoord(ra=+180.0 * u.deg,
                                    dec=-60.0 * u.deg,
                                    frame='icrs',
                                    equinox='J2000')
        self.vis_list = [
            arlexecute.execute(ingest_unittest_visibility)(
                self.low, [self.frequency[i]], [self.channelwidth[i]],
                self.times,
                self.vis_pol,
                self.phasecentre,
                block=block,
                zerow=zerow) for i, _ in enumerate(self.frequency)
        ]

        self.model_imagelist = [
            arlexecute.execute(create_unittest_model,
                               nout=freqwin)(self.vis_list[0],
                                             self.image_pol,
                                             npixel=self.npixel)
            for i, _ in enumerate(self.frequency)
        ]

        self.components_list = [
            arlexecute.execute(create_unittest_components)(
                self.model_imagelist[i], flux[i, :][numpy.newaxis, :])
            for i, _ in enumerate(self.frequency)
        ]

        # Apply the LOW primary beam and insert into model
        self.model_imagelist = [
            arlexecute.execute(insert_skycomponent,
                               nout=1)(self.model_imagelist[freqwin],
                                       self.components_list[freqwin])
            for freqwin, _ in enumerate(self.frequency)
        ]

        self.vis_list = [
            arlexecute.execute(predict_skycomponent_visibility)(
                self.vis_list[freqwin], self.components_list[freqwin])
            for freqwin, _ in enumerate(self.frequency)
        ]

        # Calculate the model convolved with a Gaussian.
        self.model_imagelist = arlexecute.compute(self.model_imagelist,
                                                  sync=True)
        model = self.model_imagelist[0]
        self.cmodel = smooth_image(model)
        export_image_to_fits(model,
                             '%s/test_imaging_delayed_model.fits' % self.dir)
        export_image_to_fits(self.cmodel,
                             '%s/test_imaging_delayed_cmodel.fits' % self.dir)

        if add_errors and block:
            self.vis_list = [
                arlexecute.execute(insert_unittest_errors)(
                    self.vis_list[i],
                    amp_errors=amp_errors,
                    phase_errors=phase_errors)
                for i, _ in enumerate(self.frequency)
            ]

        self.vis_list = arlexecute.compute(self.vis_list, sync=True)

        self.vis_list = arlexecute.scatter(self.vis_list)
        self.model_imagelist = arlexecute.scatter(self.model_imagelist)
def main():
    """Workflow stage application."""
    init_logging()

    # Get Dask client
    arlexecute.set_client(get_dask_Client())
    arlexecute.run(init_logging)

    LOG.info('Results dir = %s', RESULTS_DIR)
    LOG.info("Starting imaging-modeling")

    # Read parameters
    PARFILE = 'parameters.json'
    if len(sys.argv) > 1:
        PARFILE = sys.argv[1]
    LOG.info("JSON parameter file = %s", PARFILE)

    try:
        with open(PARFILE, "r") as par_file:
            jspar = json.load(par_file)
    except (OSError, json.JSONDecodeError) as error:
        LOG.critical('ERROR %s', error)
        return

    # Model parameters
    configuration = jspar["modeling"]["configuration"]["name"]
    num_freq_win = jspar["modeling"]["configuration"]["num_freq_win"]  # 7
    num_times = jspar["modeling"]["configuration"]["num_times"]  # 11
    r_max = jspar["modeling"]["configuration"]["r_max"]  # 300.0
    fstart = jspar["modeling"]["configuration"]["fstart"]
    fend = jspar["modeling"]["configuration"]["fend"]
    timestart_pi = jspar["modeling"]["configuration"]["timestart_pi"]  # -1/3
    timeend_pi = jspar["modeling"]["configuration"]["timeend_pi"]  # 1/3
    polframe = jspar["modeling"]["configuration"]["PolarisationFrame"]  # StokesI

    frequency = numpy.linspace(fstart, fend, num_freq_win)
    channel_bw = numpy.array(num_freq_win * [frequency[1] - frequency[0]]) # 0.9e8 ... 1.1e8
    times = numpy.linspace(numpy.pi * timestart_pi, numpy.pi * timeend_pi, num_times)

    phase_centre = SkyCoord(ra=jspar["modeling"]["phasecentre"]["RA"] * u.deg,
                            dec=jspar["modeling"]["phasecentre"]["Dec"] * u.deg,
                            frame=jspar["modeling"]["phasecentre"]["frame"],
                            equinox=jspar["modeling"]["phasecentre"]["equinox"])

    # Simulate visibilities
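    # simulate_arlexecute builds a graph that creates the (as yet unfilled)
    # BlockVisibilities for the named configuration; how they are split across
    # the list depends on the 'order' setting read from the JSON parameters.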
    vis_list = simulate_arlexecute(configuration,
                                  frequency=frequency,
                                  channel_bandwidth=channel_bw,
                                  times=times,
                                  phasecentre=phase_centre,
                                  order=jspar["modeling"]["simulate"]["order"],
                                  rmax=r_max)

    LOG.info('%d elements in vis_list', len(vis_list))
    LOG.info('About to make visibility')
    vis_list = arlexecute.compute(vis_list, sync=True)
    LOG.debug('vis_list type: %s', type(vis_list))
    LOG.debug('vis_list element type: %s', type(vis_list[0]))
    try:
        export_blockvisibility_to_hdf5(vis_list,
                                       '%s/%s' % (RESULTS_DIR, jspar["files"]["vis_list"]))
    except AssertionError as error:
        LOG.critical('ERROR %s', error)
        return

    wprojection_planes = jspar["advice"]["wprojection_planes"]
    guard_band_image   = jspar["advice"]["guard_band_image"]
    delA               = jspar["advice"]["delA"]
    advice_low = advise_wide_field(vis_list[0], guard_band_image=guard_band_image,
                                   delA=delA,
                                   wprojection_planes=wprojection_planes)
    advice_high = advise_wide_field(vis_list[-1], guard_band_image=guard_band_image,
                                    delA=delA,
                                    wprojection_planes=wprojection_planes)


    vis_slices = advice_low['vis_slices']
    num_pixels = advice_high['npixels2']
    cellsize = min(advice_low['cellsize'], advice_high['cellsize'])

    # Create GLEAM model
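    # Each per-channel image is filled from the GLEAM catalogue down to the
    # configured flux_limit and, if applybeam is set, attenuated by the LOW
    # primary beam.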
    gleam_model = [
        arlexecute.execute(create_low_test_image_from_gleam)(
            npixel=num_pixels,
            frequency=[frequency[f]],
            channel_bandwidth=[channel_bw[f]],
            cellsize=cellsize,
            phasecentre=phase_centre,
            polarisation_frame=PolarisationFrame(polframe),
            flux_limit=jspar["modeling"]["gleam_model"]["flux_limit"], # 1.0,
            applybeam=jspar["modeling"]["gleam_model"]["applybeam"])  # True
        for f, freq in enumerate(frequency)
    ]


    LOG.info('About to make GLEAM model')
    gleam_model = arlexecute.compute(gleam_model, sync=True)
    # future_gleam_model = arlexecute.scatter(gleam_model)

    # Get predicted visibilities for GLEAM model
    LOG.info('About to run predict to get predicted visibility')
    future_vis_graph = arlexecute.scatter(vis_list)
    predicted_vis_list = predict_arlexecute(future_vis_graph, gleam_model,
                                           context=jspar["modeling"]["predict"]["context"],  #'wstack'
                                           vis_slices=vis_slices)
    predicted_vis_list = arlexecute.compute(predicted_vis_list, sync=True)
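    # The predicted visibilities now hold the GLEAM model; corrupt_arlexecute
    # adds random phase errors of the configured size to emulate an
    # uncalibrated observation.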
    corrupted_vis_list = corrupt_arlexecute(predicted_vis_list, phase_error=jspar["modeling"]["corrupt"]["phase_error"]) #1.0

    LOG.info('About to run corrupt to get corrupted visibility')
    corrupted_vis_list = arlexecute.compute(corrupted_vis_list, sync=True)

    LOG.info('About to output predicted_vislist.hdf')
    export_blockvisibility_to_hdf5(predicted_vis_list,
                                   '%s/%s' % (RESULTS_DIR,jspar["files"]["predicted_vis_list"]))

    LOG.info('About to output corrupted_vislist.hdf')

    export_blockvisibility_to_hdf5(corrupted_vis_list,
                                   '%s/%s' % (RESULTS_DIR, jspar["files"]["corrupted_vis_list"]))
    # Close Dask client
    arlexecute.close()
Example #6
def main():
    """Run the workflow."""
    init_logging()

    LOG.info("Starting imaging-pipeline")

    # Read parameters
    PARFILE = 'parameters.json'
    if len(sys.argv) > 1:
        PARFILE = sys.argv[1]
    LOG.info("JSON parameter file = %s", PARFILE)
    try:
        with open(PARFILE, "r") as par_file:
            jspar = json.load(par_file)
    except (OSError, json.JSONDecodeError) as error:
        LOG.critical('ERROR %s', error)
        return

    # We will use dask
    arlexecute.set_client(get_dask_Client())
    arlexecute.run(init_logging)

    # Import visibility list from HDF5 file
    vis_list = import_blockvisibility_from_hdf5(
        '%s/%s' % (RESULTS_DIR, jspar["files"]["vis_list"]))

    # Now read the BlockVisibilities constructed using a model drawn from GLEAM
    predicted_vislist = import_blockvisibility_from_hdf5(
        '%s/%s' % (RESULTS_DIR, jspar["files"]["predicted_vis_list"]))
    corrupted_vislist = import_blockvisibility_from_hdf5(
        '%s/%s' % (RESULTS_DIR, jspar["files"]["corrupted_vis_list"]))

    # Reproduce parameters from the visibility data
    ntimes = vis_list[0].nvis

    phasecentre = vis_list[0].phasecentre
    print(phasecentre)
    polframe = vis_list[0].polarisation_frame.type
    LOG.info("Polarisation Frame of vis_list: %s", polframe)

    wprojection_planes = jspar["advice"]["wprojection_planes"]
    guard_band_image = jspar["advice"]["guard_band_image"]
    delA = jspar["advice"]["delA"]
    advice_low = advise_wide_field(vis_list[0],
                                   guard_band_image=guard_band_image,
                                   delA=delA,
                                   wprojection_planes=wprojection_planes)
    advice_high = advise_wide_field(vis_list[-1],
                                    guard_band_image=guard_band_image,
                                    delA=delA,
                                    wprojection_planes=wprojection_planes)

    vis_slices = advice_low['vis_slices']
    npixel = advice_high['npixels2']
    cellsize = min(advice_low['cellsize'], advice_high['cellsize'])

    # Recovering frequencies
    fstart = vis_list[0].frequency
    fend = vis_list[-1].frequency
    num_freq_win = len(vis_list)
    frequency = numpy.linspace(fstart, fend, num_freq_win)

    # Recovering bandwidths
    channel_bandwidth = numpy.array(
        num_freq_win * [vis_list[1].frequency - vis_list[0].frequency])

    # Get the LSM. This is currently blank.
    model_list = [
        arlexecute.execute(create_image_from_visibility)(
            vis_list[f],
            npixel=npixel,
            frequency=[frequency[f]],
            channel_bandwidth=[channel_bandwidth[f]],
            cellsize=cellsize,
            phasecentre=phasecentre,
            polarisation_frame=PolarisationFrame(polframe))
        for f, freq in enumerate(frequency)
    ]
    # future_predicted_vislist = arlexecute.scatter(predicted_vislist)

    # Create and execute graphs to make the dirty image and PSF
    # LOG.info('About to run invert to get dirty image')
    # dirty_list = invert_component(future_predicted_vislist, model_list,
    #                               context='wstack',
    #                               vis_slices=vis_slices, dopsf=False)
    # dirty_list = arlexecute.compute(dirty_list, sync=True)

    # LOG.info('About to run invert to get PSF')
    # psf_list = invert_component(future_predicted_vislist, model_list,
    #                             context='wstack',
    #                             vis_slices=vis_slices, dopsf=True)
    # psf_list = arlexecute.compute(psf_list, sync=True)

    # Now deconvolve using msclean
    # LOG.info('About to run deconvolve')
    # deconvolve_list, _ = deconvolve_component(
    #     dirty_list, psf_list,
    #     model_imagelist=model_list,
    #     deconvolve_facets=8,
    #     deconvolve_overlap=16,
    #     deconvolve_taper='tukey',
    #     scales=[0, 3, 10],
    #     algorithm='msclean',
    #     niter=1000,
    #     fractional_threshold=0.1,
    #     threshold=0.1,
    #     gain=0.1,
    #     psf_support=64)
    # deconvolved = arlexecute.compute(deconvolve_list, sync=True)

    LOG.info('About to run continuum imaging')
    continuum_imaging_list = continuum_imaging_arlexecute(
        predicted_vislist,
        model_imagelist=model_list,
        context=jspar["processing"]["continuum_imaging"]
        ["context"],  #'wstack',
        vis_slices=vis_slices,
        scales=jspar["processing"]["continuum_imaging"]
        ["scales"],  #[0, 3, 10],
        algorithm=jspar["processing"]["continuum_imaging"]
        ["algorithm"],  #'mmclean',
        nmoment=jspar["processing"]["continuum_imaging"]["nmoment"],  #3,
        niter=jspar["processing"]["continuum_imaging"]["niter"],  #1000,
        fractional_threshold=jspar["processing"]["continuum_imaging"]
        ["fractional_threshold"],  #0.1,
        threshold=jspar["processing"]["continuum_imaging"]["threshold"],  #0.1,
        nmajor=jspar["processing"]["continuum_imaging"]["nmajor"],  #5,
        gain=jspar["processing"]["continuum_imaging"]["gain"],  #0.25,
        deconvolve_facets=jspar["processing"]["continuum_imaging"]
        ["deconvolve_facets"],  #8,
        deconvolve_overlap=jspar["processing"]["continuum_imaging"]
        ["deconvolve_overlap"],  #16,
        deconvolve_taper=jspar["processing"]["continuum_imaging"]
        ["deconvolve_taper"],  #'tukey',
        psf_support=jspar["processing"]["continuum_imaging"]
        ["psf_support"])  #64)
    result = arlexecute.compute(continuum_imaging_list, sync=True)
    deconvolved = result[0][0]
    residual = result[1][0]
    restored = result[2][0]

    print(qa_image(deconvolved, context='Clean image - no selfcal'))
    print(qa_image(restored, context='Restored clean image - no selfcal'))
    export_image_to_fits(
        restored,
        '%s/%s' % (RESULTS_DIR, jspar["files"]["continuum_imaging_restored"]))

    print(qa_image(residual[0], context='Residual clean image - no selfcal'))
    export_image_to_fits(
        residual[0],
        '%s/%s' % (RESULTS_DIR, jspar["files"]["continuum_imaging_residual"]))

    controls = create_calibration_controls()

    controls['T']['first_selfcal'] = jspar["processing"]["controls"]["T"][
        "first_selfcal"]
    controls['G']['first_selfcal'] = jspar["processing"]["controls"]["G"][
        "first_selfcal"]
    controls['B']['first_selfcal'] = jspar["processing"]["controls"]["B"][
        "first_selfcal"]

    controls['T']['timescale'] = jspar["processing"]["controls"]["T"][
        "timescale"]
    controls['G']['timescale'] = jspar["processing"]["controls"]["G"][
        "timescale"]
    controls['B']['timescale'] = jspar["processing"]["controls"]["B"][
        "timescale"]

    PP.pprint(controls)

    future_corrupted_vislist = arlexecute.scatter(corrupted_vislist)
    ical_list = ical_arlexecute(
        future_corrupted_vislist,
        model_imagelist=model_list,
        context=jspar["processing"]["ical"]["context"],  #'wstack',
        calibration_context=jspar["processing"]["ical"]
        ["calibration_context"],  #'TG',
        controls=controls,
        vis_slices=ntimes,
        scales=jspar["processing"]["ical"]["scales"],  #[0, 3, 10],
        timeslice=jspar["processing"]["ical"]["timeslice"],  #'auto',
        algorithm=jspar["processing"]["ical"]["algorithm"],  #'mmclean',
        nmoment=jspar["processing"]["ical"]["nmoment"],  #3,
        niter=jspar["processing"]["ical"]["niter"],  #1000,
        fractional_threshold=jspar["processing"]["ical"]
        ["fractional_threshold"],  #0.1,
        threshold=jspar["processing"]["ical"]["threshold"],  #0.1,
        nmajor=jspar["processing"]["ical"]["nmajor"],  #5,
        gain=jspar["processing"]["ical"]["gain"],  #0.25,
        deconvolve_facets=jspar["processing"]["ical"]
        ["deconvolve_facets"],  #8,
        deconvolve_overlap=jspar["processing"]["ical"]
        ["deconvolve_overlap"],  #16,
        deconvolve_taper=jspar["processing"]["ical"]
        ["deconvolve_taper"],  #'tukey',
        global_solution=jspar["processing"]["ical"]
        ["global_solution"],  #False,
        do_selfcal=jspar["processing"]["ical"]["do_selfcal"],  #True,
        psf_support=jspar["processing"]["ical"]["psf_support"])  #64

    LOG.info('About to run ical')
    result = arlexecute.compute(ical_list, sync=True)
    deconvolved = result[0][0]
    residual = result[1][0]
    restored = result[2][0]

    print(qa_image(deconvolved, context='Clean image'))
    print(qa_image(restored, context='Restored clean image'))
    export_image_to_fits(
        restored, '%s/%s' % (RESULTS_DIR, jspar["files"]["ical_restored"]))

    print(qa_image(residual[0], context='Residual clean image'))
    export_image_to_fits(
        residual[0], '%s/%s' % (RESULTS_DIR, jspar["files"]["ical_residual"]))

    arlexecute.close()
def main():
    """Workflow stage application."""
    init_logging()

    # Get Dask client
    arlexecute.set_client(get_dask_Client())
    arlexecute.run(init_logging)

    LOG.info('Results dir = %s', RESULTS_DIR)
    LOG.info("Starting imaging-modeling")

    # Read parameters
    PARFILE = 'parameters.json'
    if len(sys.argv) > 1:
        PARFILE = sys.argv[1]
    LOG.info("JSON parameter file = %s", PARFILE)

    try:
        with open(PARFILE, "r") as par_file:
            jspar = json.load(par_file)
    except (OSError, json.JSONDecodeError) as error:
        LOG.critical('ERROR %s', error)
        return

    # Model parameters
    configuration = jspar["modeling"]["configuration"]["name"]
    num_freq_win = jspar["modeling"]["configuration"]["num_freq_win"]  # 7
    num_times = jspar["modeling"]["configuration"]["num_times"]  # 11
    r_max = jspar["modeling"]["configuration"]["r_max"]  # 300.0
    fstart = jspar["modeling"]["configuration"]["fstart"]
    fend = jspar["modeling"]["configuration"]["fend"]
    timestart_pi = jspar["modeling"]["configuration"]["timestart_pi"]  # -1/3
    timeend_pi = jspar["modeling"]["configuration"]["timeend_pi"]  # 1/3
    polframe = jspar["modeling"]["configuration"][
        "PolarisationFrame"]  # StokesI

    frequency = numpy.linspace(fstart, fend, num_freq_win)
    channel_bw = numpy.array(num_freq_win *
                             [frequency[1] - frequency[0]])  # 0.9e8 ... 1.1e8
    times = numpy.linspace(numpy.pi * timestart_pi, numpy.pi * timeend_pi,
                           num_times)

    phase_centre = SkyCoord(
        ra=jspar["modeling"]["phasecentre"]["RA"] * u.deg,
        dec=jspar["modeling"]["phasecentre"]["Dec"] * u.deg,
        frame=jspar["modeling"]["phasecentre"]["frame"],
        equinox=jspar["modeling"]["phasecentre"]["equinox"])

    # Simulate visibilities
    vis_list = simulate_arlexecute(
        configuration,
        frequency=frequency,
        channel_bandwidth=channel_bw,
        times=times,
        phasecentre=phase_centre,
        order=jspar["modeling"]["simulate"]["order"],
        rmax=r_max)

    LOG.info('%d elements in vis_list', len(vis_list))
    LOG.info('About to make visibility')
    vis_list = arlexecute.compute(vis_list, sync=True)
    LOG.debug('vis_list type: %s', type(vis_list))
    LOG.debug('vis_list element type: %s', type(vis_list[0]))
    try:
        export_blockvisibility_to_hdf5(
            vis_list, '%s/%s' % (RESULTS_DIR, jspar["files"]["vis_list"]))
    except AssertionError as error:
        LOG.critical('ERROR %s', error)
        return

    wprojection_planes = jspar["advice"]["wprojection_planes"]
    guard_band_image = jspar["advice"]["guard_band_image"]
    delA = jspar["advice"]["delA"]
    advice_low = advise_wide_field(vis_list[0],
                                   guard_band_image=guard_band_image,
                                   delA=delA,
                                   wprojection_planes=wprojection_planes)
    advice_high = advise_wide_field(vis_list[-1],
                                    guard_band_image=guard_band_image,
                                    delA=delA,
                                    wprojection_planes=wprojection_planes)

    vis_slices = advice_low['vis_slices']
    num_pixels = advice_high['npixels2']
    cellsize = min(advice_low['cellsize'], advice_high['cellsize'])

    # Create GLEAM model
    gleam_model = [
        arlexecute.execute(create_low_test_image_from_gleam)(
            npixel=num_pixels,
            frequency=[frequency[f]],
            channel_bandwidth=[channel_bw[f]],
            cellsize=cellsize,
            phasecentre=phase_centre,
            polarisation_frame=PolarisationFrame(polframe),
            flux_limit=jspar["modeling"]["gleam_model"]["flux_limit"],  # 1.0,
            applybeam=jspar["modeling"]["gleam_model"]["applybeam"])  # True
        for f, freq in enumerate(frequency)
    ]

    LOG.info('About to make GLEAM model')
    gleam_model = arlexecute.compute(gleam_model, sync=True)
    # future_gleam_model = arlexecute.scatter(gleam_model)

    # Get predicted visibilities for GLEAM model
    LOG.info('About to run predict to get predicted visibility')
    future_vis_graph = arlexecute.scatter(vis_list)
    predicted_vis_list = predict_arlexecute(
        future_vis_graph,
        gleam_model,
        context=jspar["modeling"]["predict"]["context"],  #'wstack'
        vis_slices=vis_slices)
    predicted_vis_list = arlexecute.compute(predicted_vis_list, sync=True)
    corrupted_vis_list = corrupt_arlexecute(
        predicted_vis_list,
        phase_error=jspar["modeling"]["corrupt"]["phase_error"])  #1.0

    LOG.info('About to run corrupt to get corrupted visibility')
    corrupted_vis_list = arlexecute.compute(corrupted_vis_list, sync=True)

    LOG.info('About to output predicted_vislist.hdf')
    export_blockvisibility_to_hdf5(
        predicted_vis_list,
        '%s/%s' % (RESULTS_DIR, jspar["files"]["predicted_vis_list"]))

    LOG.info('About to output corrupted_vislist.hdf')

    export_blockvisibility_to_hdf5(
        corrupted_vis_list,
        '%s/%s' % (RESULTS_DIR, jspar["files"]["corrupted_vis_list"]))
    # Close Dask client
    arlexecute.close()