def _predict_base(self,
                      context='2d',
                      extra='',
                      fluxthreshold=1.0,
                      facets=1,
                      vis_slices=1,
                      **kwargs):
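        # Shared helper: predict the model into a zeroed copy of the test
        # visibilities, subtract that prediction from the originals, image the
        # residual, and require the residual peak to stay below fluxthreshold.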
        vis_list = zero_vislist_arlexecute(self.vis_list)
        vis_list = predict_arlexecute(vis_list,
                                      self.model_list,
                                      context=context,
                                      vis_slices=vis_slices,
                                      facets=facets,
                                      **kwargs)
        vis_list = subtract_vislist_arlexecute(self.vis_list, vis_list)[0]

        vis_list = arlexecute.compute(vis_list, sync=True)

        dirty = invert_arlexecute([vis_list], [self.model_list[0]],
                                  context='2d',
                                  dopsf=False,
                                  normalize=True)[0]
        dirty = arlexecute.compute(dirty, sync=True)

        assert numpy.max(numpy.abs(dirty[0].data)), "Residual image is empty"
        export_image_to_fits(
            dirty[0], '%s/test_imaging_predict_%s%s_%s_dirty.fits' %
            (self.dir, context, extra, arlexecute.type()))

        maxabs = numpy.max(numpy.abs(dirty[0].data))
        assert maxabs < fluxthreshold, "Error %.3f greater than fluxthreshold %.3f " % (
            maxabs, fluxthreshold)
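
    # A concrete caller of the helper above would look roughly like this; the
    # method name and argument choices are illustrative, not taken from the
    # source.
    def test_predict_2d(self):
        self.actualSetUp(zerow=True)
        self._predict_base(context='2d', fluxthreshold=1.0)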
 def test_ical_pipeline(self):
     amp_errors = {'T': 0.0, 'G': 0.00, 'B': 0.0}
     phase_errors = {'T': 0.1, 'G': 0.0, 'B': 0.0}
     self.actualSetUp(add_errors=True, block=True, amp_errors=amp_errors, phase_errors=phase_errors)
     
     controls = create_calibration_controls()
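     # T, G and B select the phase, gain and bandpass calibration terms;
     # first_selfcal is the first major cycle at which each term is solved for,
     # and timescale is its solution interval ('auto' lets the workflow choose).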
     
     controls['T']['first_selfcal'] = 1
     controls['G']['first_selfcal'] = 3
     controls['B']['first_selfcal'] = 4
     
     controls['T']['timescale'] = 'auto'
     controls['G']['timescale'] = 'auto'
     controls['B']['timescale'] = 1e5
     
     ical_list = \
         ical_workflow(self.vis_list, model_imagelist=self.model_imagelist, context='2d',
                        calibration_context='T', controls=controls, do_selfcal=True,
                        global_solution=False,
                        algorithm='mmclean',
                        facets=1,
                        scales=[0, 3, 10],
                        niter=1000, fractional_threshold=0.1,
                        nmoments=2, nchan=self.freqwin,
                        threshold=2.0, nmajor=5, gain=0.1,
                        deconvolve_facets=8, deconvolve_overlap=16, deconvolve_taper='tukey')
     clean, residual, restored = arlexecute.compute(ical_list, sync=True)
     export_image_to_fits(clean[0], '%s/test_pipelines_ical_pipeline_clean.fits' % self.dir)
     export_image_to_fits(residual[0][0], '%s/test_pipelines_ical_pipeline_residual.fits' % self.dir)
     export_image_to_fits(restored[0], '%s/test_pipelines_ical_pipeline_restored.fits' % self.dir)
     
     qa = qa_image(restored[0])
     assert numpy.abs(qa.data['max'] - 116.9) < 1.0, str(qa)
     assert numpy.abs(qa.data['min'] + 0.118) < 1.0, str(qa)
    def test_create_list_spectral_average_arlexecute(self):
        if not self.casacore_available:
            return

        msfile = arl_path("data/vis/ASKAP_example.ms")

        from workflows.arlexecute.execution_support.arlexecute import arlexecute
        arlexecute.set_client(use_dask=False)

        nchan_ave = 16
        nchan = 192

        def create_and_average(schan):
            max_chan = min(nchan, schan + nchan_ave)
            bv = create_blockvisibility_from_ms(msfile, range(schan, max_chan))
            return integrate_visibility_by_channel(bv[0])

        vis_by_channel_workflow = \
            [arlexecute.execute(create_and_average)(schan) for schan in range(0, nchan, nchan_ave)]

        vis_by_channel = arlexecute.compute(vis_by_channel_workflow)
        arlexecute.close()

        assert len(vis_by_channel) == 12
        for v in vis_by_channel:
            assert v.vis.data.shape[-1] == 4
            assert v.polarisation_frame.type == "linear"
            assert v.vis.data.shape[-2] == 1
    def test_continuum_imaging_pipeline(self):
        self.actualSetUp(add_errors=False, block=True)
        continuum_imaging_list = \
            continuum_imaging_arlexecute(self.vis_list, model_imagelist=self.model_imagelist, context='2d',
                                         algorithm='mmclean', facets=1,
                                         scales=[0, 3, 10],
                                         niter=1000, fractional_threshold=0.1,
                                         nmoments=2, nchan=self.freqwin,
                                         threshold=2.0, nmajor=5, gain=0.1,
                                         deconvolve_facets=8, deconvolve_overlap=16,
                                         deconvolve_taper='tukey')
        clean, residual, restored = arlexecute.compute(continuum_imaging_list,
                                                       sync=True)
        export_image_to_fits(
            clean[0],
            '%s/test_pipelines_continuum_imaging_pipeline_clean.fits' %
            self.dir)
        export_image_to_fits(
            residual[0][0],
            '%s/test_pipelines_continuum_imaging_pipeline_residual.fits' %
            self.dir)
        export_image_to_fits(
            restored[0],
            '%s/test_pipelines_continuum_imaging_pipeline_restored.fits' %
            self.dir)

        qa = qa_image(restored[0])
        assert numpy.abs(qa.data['max'] - 116.9) < 1.0, str(qa)
        assert numpy.abs(qa.data['min'] + 0.118) < 1.0, str(qa)
    def _invert_base(self,
                     context,
                     extra='',
                     fluxthreshold=1.0,
                     positionthreshold=1.0,
                     check_components=True,
                     facets=1,
                     vis_slices=1,
                     **kwargs):

        dirty = invert_arlexecute(self.vis_list,
                                  self.model_list,
                                  context=context,
                                  dopsf=False,
                                  normalize=True,
                                  facets=facets,
                                  vis_slices=vis_slices,
                                  **kwargs)[0]
        dirty = arlexecute.compute(dirty, sync=True)

        export_image_to_fits(
            dirty[0], '%s/test_imaging_invert_%s%s_%s_dirty.fits' %
            (self.dir, context, extra, arlexecute.type()))

        assert numpy.max(numpy.abs(dirty[0].data)), "Image is empty"

        if check_components:
            self._checkcomponents(dirty[0], fluxthreshold, positionthreshold)
    def test_deconvolve_spectral(self):
        self.actualSetUp(add_errors=True)
        dirty_imagelist = invert_workflow(self.vis_list,
                                          self.model_imagelist,
                                          context='2d',
                                          dopsf=False,
                                          normalize=True)
        psf_imagelist = invert_workflow(self.vis_list,
                                        self.model_imagelist,
                                        context='2d',
                                        dopsf=True,
                                        normalize=True)
        deconvolved, _ = deconvolve_workflow(dirty_imagelist,
                                             psf_imagelist,
                                             self.model_imagelist,
                                             niter=1000,
                                             fractional_threshold=0.1,
                                             scales=[0, 3, 10],
                                             threshold=0.1,
                                             gain=0.7)
        deconvolved = arlexecute.compute(deconvolved, sync=True)

        export_image_to_fits(
            deconvolved[0], '%s/test_imaging_%s_deconvolve_spectral.fits' %
            (self.dir, arlexecute.type()))
 def test_create_simulate_vis_list(self):
     vis_list = simulate_arlexecute(
         frequency=self.frequency, channel_bandwidth=self.channel_bandwidth)
     assert len(vis_list) == len(self.frequency)
     vt = arlexecute.compute(vis_list[0])
     assert isinstance(vt, BlockVisibility)
     assert vt.nvis > 0
    def test_useFunction(self):
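        # arlexecute.execute wraps a function as a deferred task (a Dask
        # delayed when use_dask=True, an eager call otherwise); compute()
        # then evaluates the resulting graph.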
        def square(x):
            return x**2

        arlexecute.set_client(use_dask=False)
        graph = arlexecute.execute(square)(numpy.arange(10))
        assert (arlexecute.compute(graph) == numpy.array(
            [0, 1, 4, 9, 16, 25, 36, 49, 64, 81])).all()
        arlexecute.close()
 def test_create_simulate_vis_list(self):
     arlexecute.set_client(use_dask=False)
     vis_list = simulate_workflow(frequency=self.frequency,
                                  channel_bandwidth=self.channel_bandwidth)
     assert len(vis_list) == len(self.frequency)
     vt = arlexecute.compute(vis_list[0])
     assert isinstance(vt, BlockVisibility)
     assert vt.nvis > 0
     arlexecute.close()
    def test_useDaskSync(self):
        def square(x):
            return x**2

        arlexecute.set_client(use_dask=True)
        graph = arlexecute.execute(square)(numpy.arange(10))
        result = arlexecute.compute(graph, sync=True)
        assert (result == numpy.array([0, 1, 4, 9, 16, 25, 36, 49, 64,
                                       81])).all()
        arlexecute.close()
    def test_create_generic_image_workflow(self):
        def imagerooter(im):
            im.data = numpy.sqrt(numpy.abs(im.data))
            return im
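
        # generic_image_workflow applies imagerooter facet-by-facet (facets=4)
        # and reassembles the pieces, so squaring the computed result should
        # recover |self.image.data| to within rounding.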
        
        root = generic_image_workflow(imagerooter, self.image, facets=4)
        root = arlexecute.compute(root, sync=True)
        arlexecute.close()

        numpy.testing.assert_array_almost_equal_nulp(root.data ** 2, numpy.abs(self.image.data), 7)
    def test_create_generic_blockvisibility_workflow(self):
        self.blockvis = [create_blockvisibility(self.lowcore, self.times, self.frequency,
                                                phasecentre=self.phasecentre,
                                                channel_bandwidth=self.channel_bandwidth,
                                                weight=1.0,
                                                polarisation_frame=PolarisationFrame('stokesI'))]
        
        self.blockvis = generic_blockvisibility_workflow(predict_skycomponent_visibility,
                                                          vis_list=self.blockvis,
                                                          sc=self.comp)[0]
        
        self.blockvis = arlexecute.compute(self.blockvis, sync=True)
        arlexecute.close()

        assert numpy.max(numpy.abs(self.blockvis[0].vis)) > 0.0
 def test_modelpartition_solve_arlexecute(self):
     
     self.actualSetup(doiso=True)
     
     self.skymodel_list = [arlexecute.execute(SkyModel, nout=1)(components=[cm])
                           for cm in self.components]
     
     modelpartition_list = solve_modelpartition_arlexecute(self.vis, skymodel_list=self.skymodel_list, niter=30,
                                                           gain=0.25)
     skymodel, residual_vis = arlexecute.compute(modelpartition_list, sync=True)
     
     residual_vis = convert_blockvisibility_to_visibility(residual_vis)
     residual_vis, _, _ = weight_visibility(residual_vis, self.beam)
     dirty, sumwt = invert_arlexecute(residual_vis, self.beam, context='2d')
     export_image_to_fits(dirty, "%s/test_modelpartition-%s-final-iso-residual.fits" % (self.dir, arlexecute.type()))
     
     qa = qa_image(dirty)
     assert qa.data['rms'] < 3.2e-3, qa
 def test_deconvolve_and_restore_cube_mmclean(self):
     self.actualSetUp(add_errors=True)
     dirty_imagelist = invert_arlexecute(self.vis_list, self.model_imagelist, context='2d',
                                         dopsf=False, normalize=True)
     psf_imagelist = invert_arlexecute(self.vis_list, self.model_imagelist, context='2d',
                                       dopsf=True, normalize=True)
     dec_imagelist, _ = deconvolve_arlexecute(dirty_imagelist, psf_imagelist, self.model_imagelist, niter=1000,
                                              fractional_threshold=0.01, scales=[0, 3, 10],
                                              algorithm='mmclean', nmoments=3, nchan=self.freqwin,
                                              threshold=0.1, gain=0.7)
     residual_imagelist = residual_arlexecute(self.vis_list, model_imagelist=dec_imagelist,
                                              context='wstack', vis_slices=51)
     restored = restore_arlexecute(model_imagelist=dec_imagelist, psf_imagelist=psf_imagelist,
                                   residual_imagelist=residual_imagelist,
                                   empty=self.model_imagelist)[0]
     
     restored = arlexecute.compute(restored, sync=True)
     
     export_image_to_fits(restored, '%s/test_imaging_%s_mmclean_restored.fits' % (self.dir, arlexecute.type()))
    def test_weighting(self):

        self.actualSetUp()

        context = 'wstack'
        vis_slices = 41
        facets = 1

        dirty_graph = invert_workflow(self.vis_list,
                                      self.model_graph,
                                      context=context,
                                      dopsf=False,
                                      normalize=True,
                                      facets=facets,
                                      vis_slices=vis_slices)
        dirty = arlexecute.compute(dirty_graph[0], sync=True)
        export_image_to_fits(
            dirty[0], '%s/test_imaging_noweighting_%s_dirty.fits' %
            (self.dir, arlexecute.type()))
 def actualSetUp(self, add_errors=False, freqwin=1, block=False, dospectral=True, dopol=False, zerow=False):
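      # Build, as lazy arlexecute graphs, one simulated visibility set, one
      # unit-test model image and one set of test components per frequency
      # window, then compute the first window for use in later checks.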
     
     arlexecute.set_client(use_dask=False)
     
     self.npixel = 256
     self.low = create_named_configuration('LOWBD2', rmax=750.0)
     self.freqwin = freqwin
     self.vis_list = list()
     self.ntimes = 5
     self.times = numpy.linspace(-3.0, +3.0, self.ntimes) * numpy.pi / 12.0
     
     if freqwin > 1:
         self.frequency = numpy.linspace(0.8e8, 1.2e8, self.freqwin)
         self.channelwidth = numpy.array(freqwin * [self.frequency[1] - self.frequency[0]])
     else:
         self.frequency = numpy.array([0.8e8])
         self.channelwidth = numpy.array([1e6])
     
     if dopol:
         self.vis_pol = PolarisationFrame('linear')
         self.image_pol = PolarisationFrame('stokesIQUV')
         f = numpy.array([100.0, 20.0, -10.0, 1.0])
     else:
         self.vis_pol = PolarisationFrame('stokesI')
         self.image_pol = PolarisationFrame('stokesI')
         f = numpy.array([100.0])
     
     if dospectral:
         flux = numpy.array([f * numpy.power(freq / 1e8, -0.7) for freq in self.frequency])
     else:
         flux = numpy.array([f])
     
     self.phasecentre = SkyCoord(ra=+180.0 * u.deg, dec=-60.0 * u.deg, frame='icrs', equinox='J2000')
     self.vis_list = [arlexecute.execute(ingest_unittest_visibility)(self.low,
                                                                     [self.frequency[freqwin]],
                                                                     [self.channelwidth[freqwin]],
                                                                     self.times,
                                                                     self.vis_pol,
                                                                     self.phasecentre, block=block,
                                                                     zerow=zerow)
                      for freqwin, _ in enumerate(self.frequency)]
     
     self.model_list = [arlexecute.execute(create_unittest_model, nout=freqwin)(self.vis_list[freqwin],
                                                                                 self.image_pol,
                                                                                 npixel=self.npixel)
                         for freqwin, _ in enumerate(self.frequency)]
     
     self.components_list = [arlexecute.execute(create_unittest_components)(self.model_list[freqwin],
                                                                             flux[freqwin, :][numpy.newaxis, :])
                              for freqwin, _ in enumerate(self.frequency)]
     
     self.model_list = [arlexecute.execute(insert_skycomponent, nout=1)(self.model_list[freqwin],
                                                                         self.components_list[freqwin])
                         for freqwin, _ in enumerate(self.frequency)]
     
     self.vis_list = [arlexecute.execute(predict_skycomponent_visibility)(self.vis_list[freqwin],
                                                                          self.components_list[freqwin])
                      for freqwin, _ in enumerate(self.frequency)]
     
     # Calculate the model convolved with a Gaussian.
     self.model = arlexecute.compute(self.model_list[0], sync=True)
     
     self.cmodel = smooth_image(self.model)
     export_image_to_fits(self.model, '%s/test_imaging_model.fits' % self.dir)
     export_image_to_fits(self.cmodel, '%s/test_imaging_cmodel.fits' % self.dir)
     
     if add_errors and block:
         self.vis_list = [arlexecute.execute(insert_unittest_errors)(self.vis_list[i])
                          for i, _ in enumerate(self.frequency)]
     
     self.vis = arlexecute.compute(self.vis_list[0], sync=True)
     
     self.components = arlexecute.compute(self.components_list[0], sync=True)
    times = numpy.linspace(-numpy.pi / 3.0, numpy.pi / 3.0, ntimes)
    phasecentre = SkyCoord(ra=+30.0 * u.deg,
                           dec=-60.0 * u.deg,
                           frame='icrs',
                           equinox='J2000')

    vis_list = simulate_workflow('LOWBD2',
                                 rmax=rmax,
                                 frequency=frequency,
                                 channel_bandwidth=channel_bandwidth,
                                 times=times,
                                 phasecentre=phasecentre,
                                 order='frequency')
    print('%d elements in vis_list' % len(vis_list))
    log.info('About to make visibility')
    vis_list = arlexecute.compute(vis_list, sync=True)

    print(vis_list[0])

    # In[ ]:

    wprojection_planes = 1
    advice_low = advise_wide_field(vis_list[0],
                                   guard_band_image=8.0,
                                   delA=0.02,
                                   wprojection_planes=wprojection_planes)

    advice_high = advise_wide_field(vis_list[-1],
                                    guard_band_image=8.0,
                                    delA=0.02,
                                    wprojection_planes=wprojection_planes)
                                threshold=0.1,
                                nmajor=5,
                                gain=0.25,
                                deconvolve_facets=8,
                                deconvolve_overlap=32,
                                deconvolve_taper='tukey',
                                vis_slices=ntimes,
                                timeslice='auto',
                                global_solution=False,
                                psf_support=64,
                                do_selfcal=True)

    # In[ ]:

    log.info('About to run ical_serial')
    result = arlexecute.compute(ical_list, sync=True)
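    # The ICAL workflow returns three lists (deconvolved, residual and
    # restored images); take the first frequency window of each.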
    deconvolved = result[0][0]
    residual = result[1][0]
    restored = result[2][0]
    arlexecute.close()

    show_image(deconvolved,
               title='Clean image',
               cm='Greys',
               vmax=0.1,
               vmin=-0.01)
    print(qa_image(deconvolved, context='Clean image'))
    plt.show()
    export_image_to_fits(deconvolved,
                         '%s/gleam_ical_deconvolved.fits' % (results_dir))
def main():
    """Workflow stage application."""
    init_logging()

    # Get Dask client
    arlexecute.set_client(get_dask_Client())
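    # Run init_logging on the workers as well, so Dask tasks log consistently.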
    arlexecute.run(init_logging)

    LOG.info('Results dir = %s', RESULTS_DIR)
    LOG.info("Starting imaging-modeling")

    # Read parameters
    PARFILE = 'parameters.json'
    if len(sys.argv) > 1:
        PARFILE = sys.argv[1]
    LOG.info("JSON parameter file = %s", PARFILE)

    try:
        with open(PARFILE, "r") as par_file:
            jspar = json.load(par_file)
    except (OSError, json.JSONDecodeError) as error:
        LOG.critical('ERROR %s', error)
        return

    # Model parameters
    configuration = jspar["modeling"]["configuration"]["name"]
    num_freq_win = jspar["modeling"]["configuration"]["num_freq_win"]  # 7
    num_times = jspar["modeling"]["configuration"]["num_times"]  # 11
    r_max = jspar["modeling"]["configuration"]["r_max"]  # 300.0
    fstart = jspar["modeling"]["configuration"]["fstart"]
    fend = jspar["modeling"]["configuration"]["fend"]
    timestart_pi = jspar["modeling"]["configuration"]["timestart_pi"]  # -1/3
    timeend_pi = jspar["modeling"]["configuration"]["timeend_pi"]  # 1/3
    polframe = jspar["modeling"]["configuration"][
        "PolarisationFrame"]  # StokesI

    frequency = numpy.linspace(fstart, fend, num_freq_win)
    channel_bw = numpy.array(num_freq_win *
                             [frequency[1] - frequency[0]])  # 0.9e8 ... 1.1e8
    times = numpy.linspace(numpy.pi * timestart_pi, numpy.pi * timeend_pi,
                           num_times)

    phase_centre = SkyCoord(
        ra=jspar["modeling"]["phasecentre"]["RA"] * u.deg,
        dec=jspar["modeling"]["phasecentre"]["Dec"] * u.deg,
        frame=jspar["modeling"]["phasecentre"]["frame"],
        equinox=jspar["modeling"]["phasecentre"]["equinox"])

    # Simulate visibilities
    vis_list = simulate_arlexecute(
        configuration,
        frequency=frequency,
        channel_bandwidth=channel_bw,
        times=times,
        phasecentre=phase_centre,
        order=jspar["modeling"]["simulate"]["order"],
        rmax=r_max)

    LOG.info('%d elements in vis_list', len(vis_list))
    LOG.info('About to make visibility')
    vis_list = arlexecute.compute(vis_list, sync=True)
    LOG.debug('vis_list type: %s', type(vis_list))
    LOG.debug('vis_list element type: %s', type(vis_list[0]))
    try:
        export_blockvisibility_to_hdf5(
            vis_list, '%s/%s' % (RESULTS_DIR, jspar["files"]["vis_list"]))
    except AssertionError as error:
        LOG.critical('ERROR %s', error)
        return

    wprojection_planes = jspar["advice"]["wprojection_planes"]
    guard_band_image = jspar["advice"]["guard_band_image"]
    delA = jspar["advice"]["delA"]
    advice_low = advise_wide_field(vis_list[0],
                                   guard_band_image=guard_band_image,
                                   delA=delA,
                                   wprojection_planes=wprojection_planes)
    advice_high = advise_wide_field(vis_list[-1],
                                    guard_band_image=guard_band_image,
                                    delA=delA,
                                    wprojection_planes=wprojection_planes)

    vis_slices = advice_low['vis_slices']
    num_pixels = advice_high['npixels2']
    cellsize = min(advice_low['cellsize'], advice_high['cellsize'])

    # Create GLEAM model
    gleam_model = [
        arlexecute.execute(create_low_test_image_from_gleam)(
            npixel=num_pixels,
            frequency=[frequency[f]],
            channel_bandwidth=[channel_bw[f]],
            cellsize=cellsize,
            phasecentre=phase_centre,
            polarisation_frame=PolarisationFrame(polframe),
            flux_limit=jspar["modeling"]["gleam_model"]["flux_limit"],  # 1.0,
            applybeam=jspar["modeling"]["gleam_model"]["applybeam"])  # True
        for f, freq in enumerate(frequency)
    ]

    LOG.info('About to make GLEAM model')
    gleam_model = arlexecute.compute(gleam_model, sync=True)
    # future_gleam_model = arlexecute.scatter(gleam_model)

    # Get predicted visibilities for GLEAM model
    LOG.info('About to run predict to get predicted visibility')
    future_vis_graph = arlexecute.scatter(vis_list)
    predicted_vis_list = predict_arlexecute(
        future_vis_graph,
        gleam_model,
        context=jspar["modeling"]["predict"]["context"],  #'wstack'
        vis_slices=vis_slices)
    predicted_vis_list = arlexecute.compute(predicted_vis_list, sync=True)
    corrupted_vis_list = corrupt_arlexecute(
        predicted_vis_list,
        phase_error=jspar["modeling"]["corrupt"]["phase_error"])  #1.0

    LOG.info('About to run corrupt to get corrupted visibility')
    corrupted_vis_list = arlexecute.compute(corrupted_vis_list, sync=True)

    LOG.info('About to output predicted_vislist.hdf')
    export_blockvisibility_to_hdf5(
        predicted_vis_list,
        '%s/%s' % (RESULTS_DIR, jspar["files"]["predicted_vis_list"]))

    LOG.info('About to output corrupted_vislist.hdf')

    export_blockvisibility_to_hdf5(
        corrupted_vis_list,
        '%s/%s' % (RESULTS_DIR, jspar["files"]["corrupted_vis_list"]))
    # Close Dask client
    arlexecute.close()
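
# For reference, a parameters.json consistent with the jspar[...] lookups in the
# modeling stage above and the imaging stage below would need roughly the
# following shape. The key names are taken from the code; the values here are
# only illustrative placeholders (several echo the example values quoted in the
# scripts' inline comments), not values from the source.
EXAMPLE_PARAMETERS = {
    "files": {
        "vis_list": "vis_list.hdf",
        "predicted_vis_list": "predicted_vislist.hdf",
        "corrupted_vis_list": "corrupted_vislist.hdf",
        "continuum_imaging_restored": "continuum_imaging_restored.fits",
        "continuum_imaging_residual": "continuum_imaging_residual.fits",
        "ical_restored": "ical_restored.fits",
        "ical_residual": "ical_residual.fits",
    },
    "advice": {"wprojection_planes": 1, "guard_band_image": 8.0, "delA": 0.02},
    "modeling": {
        "configuration": {"name": "LOWBD2", "num_freq_win": 7, "num_times": 11,
                          "r_max": 300.0, "fstart": 0.9e8, "fend": 1.1e8,
                          "timestart_pi": -1.0 / 3.0, "timeend_pi": 1.0 / 3.0,
                          "PolarisationFrame": "stokesI"},
        "phasecentre": {"RA": 30.0, "Dec": -60.0,
                        "frame": "icrs", "equinox": "J2000"},
        "simulate": {"order": "frequency"},
        "gleam_model": {"flux_limit": 1.0, "applybeam": True},
        "predict": {"context": "wstack"},
        "corrupt": {"phase_error": 1.0},
    },
    "processing": {
        "continuum_imaging": {
            "context": "wstack", "scales": [0, 3, 10], "algorithm": "mmclean",
            "nmoment": 3, "niter": 1000, "fractional_threshold": 0.1,
            "threshold": 0.1, "nmajor": 5, "gain": 0.25,
            "deconvolve_facets": 8, "deconvolve_overlap": 16,
            "deconvolve_taper": "tukey", "psf_support": 64,
        },
        "ical": {
            "context": "wstack", "calibration_context": "TG",
            "scales": [0, 3, 10], "timeslice": "auto", "algorithm": "mmclean",
            "nmoment": 3, "niter": 1000, "fractional_threshold": 0.1,
            "threshold": 0.1, "nmajor": 5, "gain": 0.25,
            "deconvolve_facets": 8, "deconvolve_overlap": 16,
            "deconvolve_taper": "tukey", "global_solution": False,
            "do_selfcal": True, "psf_support": 64,
        },
        "controls": {"T": {"first_selfcal": 1, "timescale": "auto"},
                     "G": {"first_selfcal": 3, "timescale": "auto"},
                     "B": {"first_selfcal": 4, "timescale": 1e5}},
    },
}
# Written to disk as JSON before running the stages, e.g.:
#     with open('parameters.json', 'w') as f:
#         json.dump(EXAMPLE_PARAMETERS, f, indent=4)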
def main():
    """Run the workflow."""
    init_logging()

    LOG.info("Starting imaging-pipeline")

    # Read parameters
    PARFILE = 'parameters.json'
    if len(sys.argv) > 1:
        PARFILE = sys.argv[1]
    LOG.info("JSON parameter file = %s", PARFILE)
    try:
        with open(PARFILE, "r") as par_file:
            jspar = json.load(par_file)
    except (OSError, json.JSONDecodeError) as error:
        LOG.critical('ERROR %s', error)
        return

    # We will use dask
    arlexecute.set_client(get_dask_Client())
    arlexecute.run(init_logging)

    # Import visibility list from HDF5 file
    vis_list = import_blockvisibility_from_hdf5(
        '%s/%s' % (RESULTS_DIR, jspar["files"]["vis_list"]))

    # Now read the BlockVisibilities constructed using a model drawn from GLEAM
    predicted_vislist = import_blockvisibility_from_hdf5(
        '%s/%s' % (RESULTS_DIR, jspar["files"]["predicted_vis_list"]))
    corrupted_vislist = import_blockvisibility_from_hdf5(
        '%s/%s' % (RESULTS_DIR, jspar["files"]["corrupted_vis_list"]))

    # Reproduce parameters from the visibility data
    ntimes = vis_list[0].nvis

    phasecentre = vis_list[0].phasecentre
    print(phasecentre)
    polframe = vis_list[0].polarisation_frame.type
    LOG.info("Polarisation Frame of vis_list: %s", polframe)

    wprojection_planes = jspar["advice"]["wprojection_planes"]
    guard_band_image = jspar["advice"]["guard_band_image"]
    delA = jspar["advice"]["delA"]
    advice_low = advise_wide_field(vis_list[0],
                                   guard_band_image=guard_band_image,
                                   delA=delA,
                                   wprojection_planes=wprojection_planes)
    advice_high = advise_wide_field(vis_list[-1],
                                    guard_band_image=guard_band_image,
                                    delA=delA,
                                    wprojection_planes=wprojection_planes)

    vis_slices = advice_low['vis_slices']
    npixel = advice_high['npixels2']
    cellsize = min(advice_low['cellsize'], advice_high['cellsize'])

    # Recovering frequencies
    fstart = vis_list[0].frequency
    fend = vis_list[-1].frequency
    num_freq_win = len(vis_list)
    frequency = numpy.linspace(fstart, fend, num_freq_win)

    # Recovering bandwidths
    channel_bandwidth = numpy.array(
        num_freq_win * [vis_list[1].frequency - vis_list[0].frequency])

    # Get the LSM. This is currently blank.
    model_list = [
        arlexecute.execute(create_image_from_visibility)(
            vis_list[f],
            npixel=npixel,
            frequency=[frequency[f]],
            channel_bandwidth=[channel_bandwidth[f]],
            cellsize=cellsize,
            phasecentre=phasecentre,
            polarisation_frame=PolarisationFrame(polframe))
        for f, freq in enumerate(frequency)
    ]
    # future_predicted_vislist = arlexecute.scatter(predicted_vislist)

    # Create and execute graphs to make the dirty image and PSF
    # LOG.info('About to run invert to get dirty image')
    # dirty_list = invert_component(future_predicted_vislist, model_list,
    #                               context='wstack',
    #                               vis_slices=vis_slices, dopsf=False)
    # dirty_list = arlexecute.compute(dirty_list, sync=True)

    # LOG.info('About to run invert to get PSF')
    # psf_list = invert_component(future_predicted_vislist, model_list,
    #                             context='wstack',
    #                             vis_slices=vis_slices, dopsf=True)
    # psf_list = arlexecute.compute(psf_list, sync=True)

    # Now deconvolve using msclean
    # LOG.info('About to run deconvolve')
    # deconvolve_list, _ = deconvolve_component(
    #     dirty_list, psf_list,
    #     model_imagelist=model_list,
    #     deconvolve_facets=8,
    #     deconvolve_overlap=16,
    #     deconvolve_taper='tukey',
    #     scales=[0, 3, 10],
    #     algorithm='msclean',
    #     niter=1000,
    #     fractional_threshold=0.1,
    #     threshold=0.1,
    #     gain=0.1,
    #     psf_support=64)
    # deconvolved = arlexecute.compute(deconvolve_list, sync=True)

    LOG.info('About to run continuum imaging')
    continuum_imaging_list = continuum_imaging_arlexecute(
        predicted_vislist,
        model_imagelist=model_list,
        context=jspar["processing"]["continuum_imaging"]
        ["context"],  #'wstack',
        vis_slices=vis_slices,
        scales=jspar["processing"]["continuum_imaging"]
        ["scales"],  #[0, 3, 10],
        algorithm=jspar["processing"]["continuum_imaging"]
        ["algorithm"],  #'mmclean',
        nmoment=jspar["processing"]["continuum_imaging"]["nmoment"],  #3,
        niter=jspar["processing"]["continuum_imaging"]["niter"],  #1000,
        fractional_threshold=jspar["processing"]["continuum_imaging"]
        ["fractional_threshold"],  #0.1,
        threshold=jspar["processing"]["continuum_imaging"]["threshold"],  #0.1,
        nmajor=jspar["processing"]["continuum_imaging"]["nmajor"],  #5,
        gain=jspar["processing"]["continuum_imaging"]["gain"],  #0.25,
        deconvolve_facets=jspar["processing"]["continuum_imaging"]
        ["deconvolve_facets"],  #8,
        deconvolve_overlap=jspar["processing"]["continuum_imaging"]
        ["deconvolve_overlap"],  #16,
        deconvolve_taper=jspar["processing"]["continuum_imaging"]
        ["deconvolve_taper"],  #'tukey',
        psf_support=jspar["processing"]["continuum_imaging"]
        ["psf_support"])  #64)
    result = arlexecute.compute(continuum_imaging_list, sync=True)
    deconvolved = result[0][0]
    residual = result[1][0]
    restored = result[2][0]

    print(qa_image(deconvolved, context='Clean image - no selfcal'))
    print(qa_image(restored, context='Restored clean image - no selfcal'))
    export_image_to_fits(
        restored,
        '%s/%s' % (RESULTS_DIR, jspar["files"]["continuum_imaging_restored"]))

    print(qa_image(residual[0], context='Residual clean image - no selfcal'))
    export_image_to_fits(
        residual[0],
        '%s/%s' % (RESULTS_DIR, jspar["files"]["continuum_imaging_residual"]))

    controls = create_calibration_controls()

    controls['T']['first_selfcal'] = jspar["processing"]["controls"]["T"][
        "first_selfcal"]
    controls['G']['first_selfcal'] = jspar["processing"]["controls"]["G"][
        "first_selfcal"]
    controls['B']['first_selfcal'] = jspar["processing"]["controls"]["B"][
        "first_selfcal"]

    controls['T']['timescale'] = jspar["processing"]["controls"]["T"][
        "timescale"]
    controls['G']['timescale'] = jspar["processing"]["controls"]["G"][
        "timescale"]
    controls['B']['timescale'] = jspar["processing"]["controls"]["B"][
        "timescale"]

    PP.pprint(controls)

    future_corrupted_vislist = arlexecute.scatter(corrupted_vislist)
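    # (scatter ships the corrupted visibilities to the workers up front, so the
    # ICAL graph below references a remote future instead of re-serialising the
    # data for every task)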
    ical_list = ical_arlexecute(
        future_corrupted_vislist,
        model_imagelist=model_list,
        context=jspar["processing"]["ical"]["context"],  #'wstack',
        calibration_context=jspar["processing"]["ical"]
        ["calibration_context"],  #'TG',
        controls=controls,
        vis_slices=ntimes,
        scales=jspar["processing"]["ical"]["scales"],  #[0, 3, 10],
        timeslice=jspar["processing"]["ical"]["timeslice"],  #'auto',
        algorithm=jspar["processing"]["ical"]["algorithm"],  #'mmclean',
        nmoment=jspar["processing"]["ical"]["nmoment"],  #3,
        niter=jspar["processing"]["ical"]["niter"],  #1000,
        fractional_threshold=jspar["processing"]["ical"]
        ["fractional_threshold"],  #0.1,
        threshold=jspar["processing"]["ical"]["threshold"],  #0.1,
        nmajor=jspar["processing"]["ical"]["nmajor"],  #5,
        gain=jspar["processing"]["ical"]["gain"],  #0.25,
        deconvolve_facets=jspar["processing"]["ical"]
        ["deconvolve_facets"],  #8,
        deconvolve_overlap=jspar["processing"]["ical"]
        ["deconvolve_overlap"],  #16,
        deconvolve_taper=jspar["processing"]["ical"]
        ["deconvolve_taper"],  #'tukey',
        global_solution=jspar["processing"]["ical"]
        ["global_solution"],  #False,
        do_selfcal=jspar["processing"]["ical"]["do_selfcal"],  #True,
        psf_support=jspar["processing"]["ical"]["psf_support"])  #64

    LOG.info('About to run ical')
    result = arlexecute.compute(ical_list, sync=True)
    deconvolved = result[0][0]
    residual = result[1][0]
    restored = result[2][0]

    print(qa_image(deconvolved, context='Clean image'))
    print(qa_image(restored, context='Restored clean image'))
    export_image_to_fits(
        restored, '%s/%s' % (RESULTS_DIR, jspar["files"]["ical_restored"]))

    print(qa_image(residual[0], context='Residual clean image'))
    export_image_to_fits(
        residual[0], '%s/%s' % (RESULTS_DIR, jspar["files"]["ical_residual"]))

    arlexecute.close()