Example #1
 def create_parametric_mask(self, dataset, radius, cutoff, cat, outputdir):
     """
     Creates a parametric mask using a model from an input catalogue.
     dataset (string): The dataset to get the coordinates for the model from.
     radius (float): The radius around the pointing centre of the input dataset within which to consider sources, in deg.
     cutoff (float): The apparent flux percentage down to which sources are considered: 0.0 accounts for no sources,
                     1.0 for all sources in the catalogue within the search radius of the target field.
     cat (string): The catalogue to search sources in. Possible options are 'NVSS', 'FIRST', and 'WENSS'.
     outputdir (string): The output directory to create the MIRIAD mask file in. The file is named mask.
     """
     lsm.write_mask(outputdir + '/mask.txt', lsm.lsm_mask(dataset, radius, cutoff, cat))
     mskfile = open(outputdir + '/mask.txt', 'r')
     object_ = mskfile.readline().rstrip('\n')
     spar = mskfile.readline()
     mskfile.close()
     imgen = lib.miriad('imgen')
     imgen.imsize = self.selfcal_image_imsize
     imgen.cell = self.selfcal_image_cellsize
     imgen.object = object_
     imgen.spar = spar
     imgen.out = outputdir + '/imgen'
     imgen.go()
     maths = lib.miriad('maths')
     maths.exp = '"<' + outputdir + '/imgen' + '>"'
     maths.mask = '"<' + outputdir + '/imgen>.gt.1e-6' + '"'
     maths.out = outputdir + '/mask'
     maths.go()
     subs_managefiles.director(self, 'rm', outputdir + '/imgen')
     subs_managefiles.director(self, 'rm', outputdir + '/mask.txt')
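
A minimal usage sketch for this method; the instance name `scal`, the chunk name and the output directory are hypothetical and only illustrate the documented parameters:

# Hypothetical call, assuming `scal` is an instance of the class that defines create_parametric_mask
scal.create_parametric_mask('00/00.mir', 0.5, 0.9, 'NVSS', '/data/selfcal/00')
# On success the MIRIAD mask image is available as /data/selfcal/00/mask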
Example #2
    def reset(self):
        """
        Function to reset the current step and remove all generated data. Be careful! Deletes all data generated in
        this step!
        """
        subs_setinit.setinitdirs(self)
        nbeams = 37

        logger.warning(' Deleting all converted data.')
        for beam in range(nbeams):
            path = self.get_crosscalsubdir_path(str(beam).zfill(2))
            if os.path.isdir(path):
                subs_managefiles.director(self, 'rm', path + '/*')
        logger.warning(
            ' Deleting all parameter file entries for CONVERT module')
        subs_param.del_param(self, 'convert_fluxcal_MSavailable')
        subs_param.del_param(self, 'convert_polcal_MSavailable')
        subs_param.del_param(self, 'convert_targetbeams_MSavailable')
        subs_param.del_param(self, 'convert_fluxcal_MS2UVFITS')
        subs_param.del_param(self, 'convert_polcal_MS2UVFITS')
        subs_param.del_param(self, 'convert_targetbeams_MS2UVFITS')
        subs_param.del_param(self, 'convert_fluxcal_UVFITSavailable')
        subs_param.del_param(self, 'convert_polcal_UVFITSavailable')
        subs_param.del_param(self, 'convert_targetbeams_UVFITSavailable')
        subs_param.del_param(self, 'convert_fluxcal_UVFITS2MIRIAD')
        subs_param.del_param(self, 'convert_polcal_UVFITS2MIRIAD')
        subs_param.del_param(self, 'convert_targetbeams_UVFITS2MIRIAD')
Example #3
 def reset(self):
     """
     Function to reset the current step and remove all generated data. Be careful! Deletes all data generated in this step!
     """
     subs_setinit.setinitdirs(self)
     logger.warning('Deleting all raw data products and their directories.')
     subs_managefiles.director(self, 'ch', self.basedir)
     deldirs = glob.glob(self.basedir + '[0-9][0-9]' + '/' + self.rawsubdir)
     for dir_ in deldirs:
         subs_managefiles.director(self, 'rm', dir_)
     logger.warning(
         'Deleting all parameter file entries for PREPARE module')
     subs_param.del_param(self, 'prepare_fluxcal_requested')
     subs_param.del_param(self, 'prepare_fluxcal_diskstatus')
     subs_param.del_param(self, 'prepare_fluxcal_altastatus')
     subs_param.del_param(self, 'prepare_fluxcal_copystatus')
     subs_param.del_param(self, 'prepare_fluxcal_rejreason')
     subs_param.del_param(self, 'prepare_polcal_requested')
     subs_param.del_param(self, 'prepare_polcal_diskstatus')
     subs_param.del_param(self, 'prepare_polcal_altastatus')
     subs_param.del_param(self, 'prepare_polcal_copystatus')
     subs_param.del_param(self, 'prepare_polcal_rejreason')
     subs_param.del_param(self, 'prepare_targetbeams_requested')
     subs_param.del_param(self, 'prepare_targetbeams_diskstatus')
     subs_param.del_param(self, 'prepare_targetbeams_altastatus')
     subs_param.del_param(self, 'prepare_targetbeams_copystatus')
     subs_param.del_param(self, 'prepare_targetbeams_rejreason')
Example #4
def get_theoretical_noise(self, dataset, gausslimit, startchan=None, endchan=None):
    """
    Subroutine to create a Stokes V image from a dataset and measure the noise, which should be similar to the theoretical one.
    dataset (string): The path to the dataset file.
    gausslimit (float): Limit used for the Gaussianity check of the Stokes V image.
    startchan (int): First channel to use for imaging, zero-based
    endchan (int): Last channel to use for imaging, zero-based
    returns (tuple): The result of the Gaussianity check and the rms of the Stokes V image
    """
    invert = lib.miriad('invert')
    invert.vis = dataset
    invert.map = 'vrms'
    invert.beam = 'vbeam'
    invert.imsize = 1024
    invert.cell = 5
    invert.stokes = 'v'
    invert.slop = 1
    invert.robust = -2
    invert.options = 'mfs'
    if startchan is not None and endchan is not None:
        invert.line = 'channel,1,' + str(startchan + 1) + ',' + str(endchan - startchan + 1) + ',' + str(endchan - startchan + 1)
    else:
        pass
    invert.go()
    vmax, vmin, vstd = imstats.getimagestats(self, 'vrms')
    gaussianity = qa.checkimagegaussianity(self, 'vrms', gausslimit)
    if os.path.isdir('vrms') and os.path.isdir('vbeam'):
        managefiles.director(self, 'rm', 'vrms')
        managefiles.director(self, 'rm', 'vbeam')
    else:
        raise ApercalException('Stokes V image was not created successfully. Cannot calculate theoretical noise! No iterative selfcal possible!')
    return gaussianity, vstd
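
A short usage sketch; `scal`, the dataset path and the Gaussianity limit are hypothetical example values:

# Hypothetical call: image Stokes V of one frequency chunk over channels 0-63
gaussianity, vnoise = scal.get_theoretical_noise('00/00.mir', 1e-2, startchan=0, endchan=63)
# gaussianity is the result of the Gaussianity check, vnoise the Stokes V rms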
Example #5
    def reset_all(self):
        """
        Function to reset the current step and remove all generated data for all beams. Be careful! Deletes all data generated in
        this step!
        """
        subs_setinit.setinitdirs(self)

        for b in range(self.NBEAMS):
            cbeam = 'convert_B' + str(b).zfill(2)

            logger.warning('Beam ' + str(b).zfill(2) +
                           ': Deleting all converted data.')
            path = self.get_crosscalsubdir_path(str(b).zfill(2))
            if os.path.isdir(path):
                subs_managefiles.director(self, 'rm', path + '/*')
            logger.warning(
                'Beam ' + str(b).zfill(2) +
                ': Deleting all parameter file entries for CONVERT module')
            subs_param.del_param(self, cbeam + '_fluxcal_MSavailable')
            subs_param.del_param(self, cbeam + '_polcal_MSavailable')
            subs_param.del_param(self, cbeam + '_targetbeams_MSavailable')
            subs_param.del_param(self, cbeam + '_fluxcal_MS2UVFITS')
            subs_param.del_param(self, cbeam + '_polcal_MS2UVFITS')
            subs_param.del_param(self, cbeam + '_targetbeams_MS2UVFITS')
            subs_param.del_param(self, cbeam + '_fluxcal_UVFITSavailable')
            subs_param.del_param(self, cbeam + '_polcal_UVFITSavailable')
            subs_param.del_param(self, cbeam + '_targetbeams_UVFITSavailable')
            subs_param.del_param(self, cbeam + '_fluxcal_UVFITS2MIRIAD')
            subs_param.del_param(self, cbeam + '_polcal_UVFITS2MIRIAD')
            subs_param.del_param(self, cbeam + '_targetbeams_UVFITS2MIRIAD')
Example #6
 def reset(self):
     """
     Function to reset the current step and remove all generated data. Be careful! Deletes all data generated in
     this step!
     """
     subs_setinit.setinitdirs(self)
     subs_setinit.setdatasetnamestomiriad(self)
     logger.warning('Deleting all data products ready for transfer!')
     subs_managefiles.director(self, 'ch', self.basedir)
     subs_managefiles.director(self, 'rm', self.transferdir)
Example #7
 def reset(self):
     """
     Function to reset the current step and remove all generated data. Be careful! Deletes all data generated in
     this step!
     """
     subs_setinit.setinitdirs(self)
     subs_setinit.setdatasetnamestomiriad(self)
     logger.warning(' Deleting all self-calibrated data.')
     subs_managefiles.director(self, 'ch', self.selfcaldir)
     subs_managefiles.director(self, 'rm', self.selfcaldir + '/*')
Example #8
 def flagline(self):
     """
     Creates an image cube of the different chunks and measures the rms in each channel. All channels with an rms
     outside of a given sigma interval are flagged in the continuum calibration, but are still used for line imaging.
     """
     if self.selfcal_flagline:
         subs_setinit.setinitdirs(self)
         subs_setinit.setdatasetnamestomiriad(self)
         logger.info(' Automatic flagging of HI-line/RFI started')
         subs_managefiles.director(self, 'ch', self.selfcaldir)
         for chunk in self.list_chunks():
             subs_managefiles.director(self, 'ch', self.selfcaldir + '/' + str(chunk))
             logger.info('Looking through data chunk ' + str(chunk) + ' #')
             invert = lib.miriad('invert')
             invert.vis = chunk + '.mir'
             invert.map = 'map'
             invert.beam = 'beam'
             invert.imsize = self.selfcal_image_imsize
             invert.cell = self.selfcal_image_cellsize
             invert.stokes = 'ii'
             invert.slop = 1
             invert.go()
             if os.path.exists('map'):
                 fits = lib.miriad('fits')
                 fits.in_ = 'map'
                 fits.op = 'xyout'
                 fits.out = 'map.fits'
                 fits.go()
                 cube = pyfits.open('map.fits')
                 data = cube[0].data
                 std = np.nanstd(data, axis=(0, 2, 3))
                 median = np.median(std)
                 stdall = np.nanstd(std)
                 diff = std - median
                 detections = np.where(np.abs(self.selfcal_flagline_sigma * diff) > stdall)[0]
                 if len(detections) > 0:
                     logger.info('Found high noise in channel(s) ' + str(detections).lstrip('[').rstrip(']') + ' #')
                     for d in detections:
                         uvflag = lib.miriad('uvflag')
                         uvflag.vis = chunk + '.mir'
                         uvflag.flagval = 'flag'
                         uvflag.line = "'" + 'channel,1,' + str(d + 1) + "'"
                         uvflag.go()
                     logger.info(
                         'Flagged channel(s) ' + str(detections).lstrip('[').rstrip(']') + ' in data chunk ' + str(
                             chunk) + ' #')
                 else:
                     logger.info('No high noise found in data chunk ' + str(chunk) + ' #')
                 subs_managefiles.director(self, 'rm', self.selfcaldir + '/' + str(chunk) + '/' + 'map')
                 subs_managefiles.director(self, 'rm', self.selfcaldir + '/' + str(chunk) + '/' + 'map.fits')
                 subs_managefiles.director(self, 'rm', self.selfcaldir + '/' + str(chunk) + '/' + 'beam')
             else:
                 logger.info(' No data in chunk ' + str(chunk) + '!')
         logger.info(' Automatic flagging of HI-line/RFI done')
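
The per-channel rejection above boils down to comparing each channel's deviation from the median rms against the overall scatter; a self-contained sketch of just that step with made-up numbers:

import numpy as np

std = np.array([1.0, 1.1, 0.9, 5.0, 1.05])  # per-channel rms of the image cube (made-up values)
sigma = 0.5                                 # stands in for selfcal_flagline_sigma
diff = std - np.median(std)
detections = np.where(np.abs(sigma * diff) > np.nanstd(std))[0]
print(detections)  # [3] -> the channel that would be flagged with uvflag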
Example #9
    def reset_all(self):
        """
        Function to reset the current step and remove all generated data. Be careful! Deletes all data generated in
        this step!
        """
        subs_setinit.setinitdirs(self)
        logger.warning(
            'Deleting all raw data products and their directories for all beams. You will need to '
            'start with the PREPARE step again!')
        subs_managefiles.director(self, 'ch', self.basedir)
        for b in range(self.NBEAMS):

            prebeam = 'prepare_B' + str(b).zfill(2)
            sbeam = 'split_B' + str(b).zfill(2)

            if os.path.isdir(self.basedir + str(b).zfill(2) + '/' +
                             self.rawsubdir):
                try:
                    logger.warning('Beam ' + str(b).zfill(2) +
                                   ': Deleting all raw data products.')
                    subs_managefiles.director(
                        self, 'rm',
                        self.basedir + str(b).zfill(2) + '/' + self.rawsubdir)
                except:
                    pass
                logger.warning(
                    'Beam ' + str(b).zfill(2) +
                    ': Deleting all parameter file entries for PREPARE and SPLIT module.'
                )

                subs_param.del_param(self, prebeam + '_fluxcal_requested')
                subs_param.del_param(self, prebeam + '_fluxcal_diskstatus')
                subs_param.del_param(self, prebeam + '_fluxcal_altastatus')
                subs_param.del_param(self, prebeam + '_fluxcal_copystatus')
                subs_param.del_param(self, prebeam + '_fluxcal_rejreason')
                subs_param.del_param(self, prebeam + '_polcal_requested')
                subs_param.del_param(self, prebeam + '_polcal_diskstatus')
                subs_param.del_param(self, prebeam + '_polcal_altastatus')
                subs_param.del_param(self, prebeam + '_polcal_copystatus')
                subs_param.del_param(self, prebeam + '_polcal_rejreason')
                subs_param.del_param(self, prebeam + '_targetbeams_requested')
                subs_param.del_param(self, prebeam + '_targetbeams_diskstatus')
                subs_param.del_param(self, prebeam + '_targetbeams_altastatus')
                subs_param.del_param(self, prebeam + '_targetbeams_copystatus')
                subs_param.del_param(self, prebeam + '_targetbeams_rejreason')

                subs_param.del_param(self, sbeam + '_fluxcal_status')
                subs_param.del_param(self, sbeam + '_polcal_status')
                subs_param.del_param(self, sbeam + '_targetbeams_status')
            else:
                logger.warning('Beam ' + str(b).zfill(2) +
                               ': No raw data present.')
Example #10
 def calc_isum(self, image):
     """
     Function to calculate the sum of the values of the pixels in an image
     image (string): The name of the image file. Must be in MIRIAD-format
     returns (float): the sum of the pixels in the image
     """
     fits = lib.miriad('fits')
     fits.op = 'xyout'
     fits.in_ = image
     fits.out = image + '.fits'
     fits.go()
     image_data = pyfits.open(image + '.fits')  # Open the image
     data = image_data[0].data
     isum = np.nansum(data)  # Sum up all pixel values
     image_data.close()  # Close the image
     subs_managefiles.director(self, 'rm', image + '.fits')
     return isum
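
Usage is a one-liner; the instance and image names are hypothetical:

# Hypothetical call: sum of all pixel values of the MIRIAD image 'image_00'
total_flux = scal.calc_isum('image_00')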
Example #11
    def reset(self):
        """
        Function to reset the current step and remove all generated data. Be careful! Deletes all data generated in
        this step!
        """
        subs_setinit.setinitdirs(self)

        logger.warning('Beam ' + self.beam +
                       ': Deleting all raw data and their directories.')
        subs_managefiles.director(self, 'ch', self.basedir)
        try:
            subs_managefiles.director(
                self, 'rm', self.basedir + self.beam + '/' + self.rawsubdir)
        except:
            pass
        logger.warning(
            'Beam ' + self.beam +
            ': Deleting all parameter file entries for SPLIT and PREPARE module'
        )

        prebeam = 'prepare_B' + str(self.beam).zfill(2)
        sbeam = 'split_B' + str(self.beam).zfill(2)

        subs_param.del_param(self, prebeam + '_fluxcal_requested')
        subs_param.del_param(self, prebeam + '_fluxcal_diskstatus')
        subs_param.del_param(self, prebeam + '_fluxcal_altastatus')
        subs_param.del_param(self, prebeam + '_fluxcal_copystatus')
        subs_param.del_param(self, prebeam + '_fluxcal_rejreason')
        subs_param.del_param(self, prebeam + '_polcal_requested')
        subs_param.del_param(self, prebeam + '_polcal_diskstatus')
        subs_param.del_param(self, prebeam + '_polcal_altastatus')
        subs_param.del_param(self, prebeam + '_polcal_copystatus')
        subs_param.del_param(self, prebeam + '_polcal_rejreason')
        subs_param.del_param(self, prebeam + '_targetbeams_requested')
        subs_param.del_param(self, prebeam + '_targetbeams_diskstatus')
        subs_param.del_param(self, prebeam + '_targetbeams_altastatus')
        subs_param.del_param(self, prebeam + '_targetbeams_copystatus')
        subs_param.del_param(self, prebeam + '_targetbeams_rejreason')

        subs_param.del_param(self, sbeam + '_fluxcal_status')
        subs_param.del_param(self, sbeam + '_polcal_status')
        subs_param.del_param(self, sbeam + '_targetbeams_status')
Example #12
def get_beam(self, image, beam):
    """
    Get the synthesised beam of an image which has not been cleaned
    image (string): Input image to use in MIRIAD format
    beam (string): Beam image for cleaning in MIRIAD format
    return (tuple): Synthesised beam parameters in the order bmaj, bmin, bpa
    """
    clean = lib.miriad('clean')
    clean.map = image
    clean.beam = beam
    clean.out = 'tmp_beampars.cl'
    clean.niters = 1
    clean.region = 'quarter'
    clean.go()
    restor = lib.miriad('restor')  # Create the restored image
    restor.model = 'tmp_beampars.cl'
    restor.beam = beam
    restor.map = image
    restor.out = 'tmp_beampars.rstr'
    restor.mode = 'clean'
    restor.go()
    convim.mirtofits('tmp_beampars.rstr', 'tmp_beampars.fits')
    pyfile = pyfits.open('tmp_beampars.fits')
    header = pyfile[0].header
    bmaj = header['BMAJ']
    bmin = header['BMIN']
    bpa = header['BPA']
    beampars = bmaj, bmin, bpa
    managefiles.director(self, 'rm', 'tmp_beampars.cl')
    managefiles.director(self, 'rm', 'tmp_beampars.rstr')
    managefiles.director(self, 'rm', 'tmp_beampars.fits')
    return beampars
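
A usage sketch; instance and file names are hypothetical:

# Hypothetical call: recover the synthesised beam from a dirty image/beam pair in MIRIAD format
bmaj, bmin, bpa = scal.get_beam('map_00', 'beam_00')  # beam major axis, minor axis and position angle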
Example #13
 def reset(self):
     """
     Function to reset the current step and remove all generated data. Be careful! Deletes all data generated in
     this step!
     """
     subs_setinit.setinitdirs(self)
     subs_setinit.setdatasetnamestomiriad(self)
     if os.path.isdir(self.mosdir):
         logger.warning('Deleting all mosaicked data products.')
         subs_managefiles.director(self, 'ch', self.basedir)
         subs_managefiles.director(self, 'rm', self.mosdir)
         logger.warning(
             'Deleting all parameter file entries for MOSAIC module')
         subs_param.del_param(self, 'mosaic_continuum_mf_status')
         subs_param.del_param(self, 'mosaic_continuum_mf_continuumstatus')
         subs_param.del_param(self, 'mosaic_continuum_mf_copystatus')
         subs_param.del_param(self, 'mosaic_continuum_mf_convolstatus')
         subs_param.del_param(self,
                              'mosaic_continuum_mf_continuumbeamparams')
         subs_param.del_param(self,
                              'mosaic_continuum_mf_continuumimagestats')
     else:
         logger.warning('Mosaicked data products are not present!')
Example #14
def create_mask(self, image, mask, threshold, theoretical_noise, beampars=None, rms_map=None):
    """
    Creates a mask from an image using pybdsf
    image (string): Input image to use in MIRIAD format
    mask (string): Output mask image in MIRIAD format
    threshold (float): Threshold in Jy to use
    theoretical_noise (float): Theoretical noise for calculating the adaptive threshold parameter inside pybdsf
    beampars (tuple, optional): Synthesised beam parameters (bmaj, bmin, bpa) to pass to pybdsf
    rms_map (bool, optional): Whether pybdsf computes an rms map; the calls below currently fix rms_map=False
    """
    convim.mirtofits(image, image + '.fits')
    bdsf_threshold = threshold / theoretical_noise
    if beampars:
#        bdsf.process_image(image + '.fits', stop_at='isl', thresh_isl=bdsf_threshold, beam=beampars, adaptive_rms_box=True, rms_map=rms_map).export_image(outfile=mask + '.fits', img_format='fits', img_type='island_mask', pad_image=True)
        bdsf.process_image(image + '.fits', stop_at='isl', thresh_isl=bdsf_threshold, beam=beampars, adaptive_rms_box=True, rms_map=False, rms_value=theoretical_noise).export_image(outfile=mask + '.fits', img_format='fits', img_type='island_mask', pad_image=True)
    else:
        bdsf.process_image(image + '.fits', stop_at='isl', thresh_isl=bdsf_threshold, adaptive_rms_box=True, rms_map=False, rms_value=theoretical_noise).export_image(outfile=mask + '.fits', img_format='fits', img_type='island_mask', pad_image=True)
    if os.path.isfile(mask + '.fits'):
        # Multiply the mask by random values to make it viewable in kvis
        fitsmask = pyfits.open(mask + '.fits')
        fitsmask_data = fitsmask[0].data
        fitsmask_hdr = fitsmask[0].header
        rand_array = np.random.rand(int(fitsmask_hdr['NAXIS1']), int(fitsmask_hdr['NAXIS2']))
        fitsmask[0].data = np.multiply(rand_array, fitsmask_data)
        fitsmask.writeto(mask + '.fits', clobber=True)
        # Convert mask to MIRIAD and generate a usable one for MIRIAD
        convim.fitstomir(mask + '.fits', mask + '_pybdsf')
        maths = lib.miriad('maths')
        maths.out = mask
        maths.exp = '"<' + mask + '_pybdsf>"'
        maths.mask = '"<' + mask + '_pybdsf>.gt.0' + '"'
        maths.go()
        managefiles.director(self, 'rm', image + '.fits.pybdsf.log')
        managefiles.director(self, 'rm', image + '.fits')
        managefiles.director(self, 'rm', mask + '.fits')
        managefiles.director(self, 'rm', mask + '_pybdsf')
    else:
        pass
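
A hedged usage sketch; instance, file names and numbers are made up:

# Hypothetical call: make a clean mask from 'image_00' at a 1 mJy threshold,
# given a theoretical noise of 50 uJy; beampars and rms_map are left at their defaults
scal.create_mask('image_00', 'mask_00', 1e-3, 5e-5)
# On success 'mask_00' is a MIRIAD mask image usable for cleaning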
Example #15
def blank_corners(self, mask, imsize):
    """
    Blanks the outer edges of a mask to make it always work with mfclean
    mask (string): Input mask in MIRIAD format
    imsize (int): image size in pixels
    """
    # Calculate the borders for the mask
    lowborder = 0.1 * imsize
    highborder = 0.9 * imsize
    # Do the masking and regrid
    maths = lib.miriad('maths')
    maths.out = mask + '_cut'
    maths.exp = '"<' + mask + '>"'
    maths.region = 'box"(' + str(lowborder) + ',' + str(lowborder) + ',' + str(highborder) + ',' + str(highborder) + ')"'
    maths.go()
    regrid = lib.miriad('regrid')
    regrid.in_ = mask + '_cut'
    regrid.out = mask + '_cut_regrid'
    regrid.axes = '1,2'
    regrid.tin = mask
    regrid.go()
    managefiles.director(self, 'rm', mask + '_cut')
    managefiles.director(self, 'rm', mask)
    managefiles.director(self, 'rn', mask, file_=mask + '_cut_regrid')
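
For an image size of 1024 pixels the retained box spans pixels ~102 to ~922 on each axis; a hypothetical call:

# Hypothetical call: blank the outer 10 per cent of 'mask_00' on each side, replacing the mask in place
scal.blank_corners('mask_00', 1024)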
Example #16
 def split(self):
     """
     Splits out a certain frequency range from the datasets for the quicklook pipeline
     """
     if self.prepare_split:
         logger.info('Splitting channel ' +
                     str(self.prepare_split_startchannel) + ' until ' +
                     str(self.prepare_split_endchannel))
         # split the flux calibrator dataset
         logger.debug("self.fluxcal = {}".format(self.fluxcal))
         logger.debug("os.path.isdir(self.get_fluxcal_path()) = {}".format(
             os.path.isdir(self.get_fluxcal_path())))
         if self.fluxcal != '' and os.path.isdir(self.get_fluxcal_path()):
             fluxcal_split = (
                 'split(vis = "{0}", outputvis = "{1}_split.MS", '
                 'spw = "0:{2}~{3}", datacolumn = "data")').format(
                     self.get_fluxcal_path(),
                     self.get_fluxcal_path().rstrip('.MS'),
                     self.prepare_split_startchannel,
                     self.prepare_split_endchannel)
             lib.run_casa([fluxcal_split], log_output=True, timeout=3600)
             if os.path.isdir(self.get_fluxcal_path().rstrip('.MS') +
                              '_split.MS'):
                 subs_managefiles.director(self, 'rm',
                                           self.get_fluxcal_path())
                 subs_managefiles.director(
                     self,
                     'rn',
                     self.get_fluxcal_path(),
                     file_=self.get_fluxcal_path().rstrip('.MS') +
                     '_split.MS')
             else:
                 logger.warning(
                     'Splitting of flux calibrator dataset not successful!')
         else:
             logger.warning(
                 'Fluxcal not set or dataset not available! Cannot split flux calibrator dataset!'
             )
         # Split the polarised calibrator dataset
         logger.debug("self.polcal = {}".format(self.polcal))
         logger.debug("os.path.isdir(self.get_polcal_path()) = {}".format(
             os.path.isdir(self.get_polcal_path())))
         if self.polcal != '' and os.path.isdir(self.get_polcal_path()):
             polcal_split = (
                 'split(vis = "{0}", outputvis = "{1}_split.MS", '
                 'spw = "0:{2}~{3}", datacolumn = "data")').format(
                     self.get_polcal_path(),
                     self.get_polcal_path().rstrip('.MS'),
                     self.prepare_split_startchannel,
                     self.prepare_split_endchannel)
             lib.run_casa([polcal_split], log_output=True, timeout=3600)
             if os.path.isdir(self.get_polcal_path().rstrip('.MS') +
                              '_split.MS'):
                 subs_managefiles.director(self, 'rm',
                                           self.get_polcal_path())
                 subs_managefiles.director(
                     self,
                     'rn',
                     self.get_polcal_path(),
                     file_=self.get_polcal_path().rstrip('.MS') +
                     '_split.MS')
             else:
                 logger.warning(
                     'Splitting of polarised calibrator dataset not successful!'
                 )
         else:
             logger.warning(
                 'Polcal not set or dataset not available! Cannot split polarised calibrator dataset!'
             )
         # Split the target dataset
         logger.debug("self.target = {}".format(self.target))
         logger.debug("os.path.isdir(self.get_target_path()) = {}".format(
             os.path.isdir(self.get_target_path())))
         if self.target != '' and os.path.isdir(self.get_target_path()):
             target_split = (
                 'split(vis = "{0}", outputvis = "{1}_split.MS", '
                 'spw = "0:{2}~{3}", datacolumn = "data")').format(
                     self.get_target_path(),
                     self.get_target_path().rstrip('.MS'),
                     self.prepare_split_startchannel,
                     self.prepare_split_endchannel)
             lib.run_casa([target_split], log_output=True, timeout=3600)
             if os.path.isdir(self.get_target_path().rstrip('.MS') +
                              '_split.MS'):
                 subs_managefiles.director(self, 'rm',
                                           self.get_target_path())
                 subs_managefiles.director(
                     self,
                     'rn',
                     self.get_target_path(),
                     file_=self.get_target_path().rstrip('.MS') +
                     '_split.MS')
             else:
                 logger.warning(
                     'Splitting of target dataset not successful!')
         else:
             logger.warning(
                 'Target not set or dataset not available! Cannot split target dataset!'
             )
     else:
         pass
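
For illustration, the command string assembled above and handed to lib.run_casa would read as follows (paths and channel numbers are hypothetical):

# Hypothetical values for vis, spw range and output name
target_split = ('split(vis = "/data/target.MS", outputvis = "/data/target_split.MS", '
                'spw = "0:100~200", datacolumn = "data")')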
Example #17
    def splitdata(self):
        """
        Applies calibrator corrections to data, splits the data into chunks in frequency and bins it to the given
        frequency resolution for the self-calibration
        """
        if self.selfcal_splitdata:
            subs_setinit.setinitdirs(self)
            subs_setinit.setdatasetnamestomiriad(self)
            subs_managefiles.director(self, 'ch', self.selfcaldir)
            logger.info(' Splitting of target data into individual frequency chunks started')
            if os.path.exists(self.selfcaldir + '/' + self.target):
                logger.info('Calibrator corrections already seem to have been applied #')
            else:
                logger.info('Applying calibrator solutions to target data before averaging #')
                uvaver = lib.miriad('uvaver')
                uvaver.vis = self.crosscaldir + '/' + self.target
                uvaver.out = self.selfcaldir + '/' + self.target
                uvaver.go()
                logger.info('Calibrator solutions to target data applied #')
            if self.selfcal_flagantenna != '':
                uvflag = lib.miriad('uvflag')
                uvflag.vis = self.selfcaldir + '/' + self.target
                uvflag.flagval = 'flag'
                uvflag.select = 'antenna(' + str(self.selfcal_flagantenna) + ')'
                uvflag.go()
            else:
                pass
            try:
                uv = aipy.miriad.UV(self.selfcaldir + '/' + self.target)
            except RuntimeError:
                raise ApercalException(' No data in your selfcal directory!')

            try:
                nsubband = len(uv['nschan'])  # Number of subbands in data
            except TypeError:
                nsubband = 1  # Only one subband in data since exception was triggered
            logger.info('Found ' + str(nsubband) + ' subband(s) in target data #')
            counter = 0  # Counter for naming the chunks and directories
            for subband in range(nsubband):
                logger.info('Started splitting of subband ' + str(subband) + ' #')
                if nsubband == 1:
                    numchan = uv['nschan']
                    finc = np.fabs(uv['sdf'])
                else:
                    numchan = uv['nschan'][subband]  # Number of channels per subband
                    finc = np.fabs(uv['sdf'][subband])  # Frequency increment for each channel
                subband_bw = numchan * finc  # Bandwidth of one subband
                subband_chunks = round(subband_bw / self.selfcal_splitdata_chunkbandwidth)
                # Round to the closest power of 2 for frequency chunks with the same bandwidth over the frequency
                # range of a subband
                subband_chunks = int(np.power(2, np.ceil(np.log(subband_chunks) / np.log(2))))
                if subband_chunks == 0:
                    subband_chunks = 1
                chunkbandwidth = (numchan / subband_chunks) * finc
                logger.info('Adjusting chunk size to ' + str(
                    chunkbandwidth) + ' GHz for regular gridding of the data chunks over frequency #')
                for chunk in range(subband_chunks):
                    logger.info(
                        'Starting splitting of data chunk ' + str(chunk) + ' for subband ' + str(subband) + ' #')
                    binchan = round(
                        self.selfcal_splitdata_channelbandwidth / finc)  # Number of channels per frequency bin
                    chan_per_chunk = numchan / subband_chunks
                    if chan_per_chunk % binchan == 0:  # Check if the frequency bin fits exactly
                        logger.info('Using frequency binning of ' + str(
                            self.selfcal_splitdata_channelbandwidth) + ' for all subbands #')
                    else:
                        # Increase the frequency bin to keep a regular grid for the chunks
                        while chan_per_chunk % binchan != 0:
                            binchan = binchan + 1
                        else:
                            # Check if the calculated bin is not larger than the subband channel number
                            if chan_per_chunk >= binchan:
                                pass
                            else:
                                # Set the frequency bin to the number of channels in the chunk of the subband
                                binchan = chan_per_chunk
                        logger.info('Increasing frequency bin of data chunk ' + str(
                            chunk) + ' to keep bandwidth of chunks equal over the whole bandwidth #')
                        logger.info('New frequency bin is ' + str(binchan * finc) + ' GHz #')
                    nchan = int(chan_per_chunk / binchan)  # Total number of output channels per chunk
                    start = 1 + chunk * chan_per_chunk
                    width = int(binchan)
                    step = int(width)
                    subs_managefiles.director(self, 'mk', self.selfcaldir + '/' + str(counter).zfill(2))
                    uvaver = lib.miriad('uvaver')
                    uvaver.vis = self.selfcaldir + '/' + self.target
                    uvaver.out = self.selfcaldir + '/' + str(counter).zfill(2) + '/' + str(counter).zfill(2) + '.mir'
                    uvaver.select = "'" + 'window(' + str(subband + 1) + ')' + "'"
                    uvaver.line = "'" + 'channel,' + str(nchan) + ',' + str(start) + ',' + str(width) + ',' + str(
                        step) + "'"
                    uvaver.go()
                    counter = counter + 1
                    logger.info('Splitting of data chunk ' + str(chunk) + ' for subband ' + str(subband) + ' done #')
                logger.info('Splitting of data for subband ' + str(subband) + ' done #')
            logger.info(' Splitting of target data into individual frequency chunks done')
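
The chunk bookkeeping above rounds the number of chunks per subband up to a power of two so that all chunks keep the same bandwidth; a standalone numeric sketch with example values (not Apertif defaults):

import numpy as np

numchan, finc, chunkbw = 1000, 1e-5, 0.003    # channels, channel width (GHz), requested chunk bandwidth (GHz)
subband_bw = numchan * finc                   # 0.01 GHz of bandwidth in this subband
subband_chunks = round(subband_bw / chunkbw)  # 3 chunks requested
subband_chunks = int(np.power(2, np.ceil(np.log(subband_chunks) / np.log(2))))  # rounded up to 4
if subband_chunks == 0:
    subband_chunks = 1
chunkbandwidth = (numchan / subband_chunks) * finc
print(subband_chunks, chunkbandwidth)         # 4 chunks of 0.0025 GHz each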
Example #18
    def mosaic_continuum_mf(self):
        """Looks for all available stacked continuum images and mosaics them into one large image."""
        subs_setinit.setinitdirs(self)
        subs_setinit.setdatasetnamestomiriad(self)

        ##########################################################################################################
        # Check if the parameter is already in the parameter file and load it otherwise create the needed arrays #
        ##########################################################################################################

        mosaiccontinuummfstatus = get_param_def(
            self, 'mosaic_continuum_mf_status',
            False)  # Status of the continuum mf mosaic
        mosaiccontinuummfcontinuumstatus = get_param_def(
            self, 'mosaic_continuum_mf_continuumstatus',
            np.full(self.NBEAMS, False))  # Status of the continuum imaging
        mosaiccontinuummfcopystatus = get_param_def(
            self, 'mosaic_continuum_mf_copystatus',
            np.full(self.NBEAMS, False))  # Status of the copy of the images
        mosaiccontinuummfconvolstatus = get_param_def(
            self, 'mosaic_continuum_mf_convolstatus',
            np.full(self.NBEAMS, False))  # Status of the convolved images
        mosaiccontinuummfcontinuumbeamparams = get_param_def(
            self, 'mosaic_continuum_mf_continuumbeamparams',
            np.full((self.NBEAMS, 3),
                    np.nan))  # Beam sizes of the input images
        mosaiccontinuummfcontinuumimagestats = get_param_def(
            self, 'mosaic_continuum_mf_continuumimagestats',
            np.full((self.NBEAMS, 3),
                    np.nan))  # Image statistics of the input images

        # Start the mosaicking of the stacked continuum images
        if self.mosaic_continuum_mf:
            subs_setinit.setinitdirs(self)
            subs_setinit.setdatasetnamestomiriad(self)
            subs_managefiles.director(self, 'ch', self.mosdir + '/continuum')
            if not mosaiccontinuummfstatus:
                logger.info('Mosaicking multi-frequency continuum images')
                # Acquire the results and statistics from continuum mf imaging
                for b in range(self.NBEAMS):
                    mosaiccontinuummfcontinuumstatus[b] = get_param_def(
                        self, 'continuum_B' + str(b).zfill(2) +
                        '_targetbeams_mf_status', False)
                    if mosaiccontinuummfcontinuumstatus[b]:
                        finalminor = get_param_def(
                            self, 'continuum_B' + str(b).zfill(2) +
                            '_targetbeams_mf_final_minorcycle', np.nan)
                        subs_managefiles.director(
                            self,
                            'cp',
                            str(b).zfill(2) + '.fits',
                            file_=self.basedir + str(b).zfill(2) + '/' +
                            self.contsubdir + '/' + 'image_mf_' +
                            str(finalminor).zfill(2) + '.fits')
                        if os.path.isfile(str(b).zfill(2) + '.fits'):
                            mosaiccontinuummfcopystatus[b] = True
                            subs_convim.fitstomir(
                                str(b).zfill(2) + '.fits',
                                str(b).zfill(2))
                            subs_managefiles.director(
                                self, 'rm',
                                str(b).zfill(2) + '.fits')
                        else:
                            mosaiccontinuummfcopystatus[b] = False
                            logger.warning('Beam ' + str(b).zfill(2) +
                                           ' was not copied successfully!')
                # Copy the images over to the mosaic directory
                for b in range(self.NBEAMS):
                    if mosaiccontinuummfcontinuumstatus[
                            b] and mosaiccontinuummfcopystatus[b]:
                        # Get the image beam parameters and the image statistics
                        mosaiccontinuummfcontinuumimagestats[
                            b, :] = subs_imstats.getimagestats(
                                self,
                                str(b).zfill(2))
                        mosaiccontinuummfcontinuumbeamparams[
                            b, :] = subs_readmirhead.getbeamimage(
                                str(b).zfill(2))
                    else:
                        logger.warning(
                            'Skipping Beam ' + str(b).zfill(2) +
                            '! Continuum mf-imaging was not successful or continuum image not available!'
                        )
                # Calculate the synthesised beam and reject outliers (algorithm needs to be updated)
                rejbeams, beamparams = subs_combim.calc_synbeam(
                    mosaiccontinuummfcontinuumbeamparams)
                # Convolve all the images to the calculated beam
                for b in range(self.NBEAMS):
                    if mosaiccontinuummfcontinuumstatus[
                            b] and mosaiccontinuummfcopystatus[b]:
                        try:
                            convol = lib.miriad('convol')
                            convol.map = str(b).zfill(2)
                            convol.fwhm = str(beamparams[0]) + ',' + str(
                                beamparams[1])
                            convol.pa = str(beamparams[2])
                            convol.options = 'final'
                            convol.out = str(b).zfill(2) + '_cv'
                            convol.go()
                            if os.path.isdir(str(b).zfill(2) + '_cv'):
                                mosaiccontinuummfconvolstatus[b] = True
                            else:
                                mosaiccontinuummfconvolstatus[b] = False
                                logger.warning(
                                    'Beam ' + str(b).zfill(2) +
                                    ' could not be convolved to the calculated beam size! File not there!'
                                )
                        except:
                            mosaiccontinuummfconvolstatus[b] = False
                            logger.warning(
                                'Beam ' + str(b).zfill(2) +
                                ' could not be convolved to the calculated beam size!'
                            )
                # Combine all the images using linmos (needs to be updated with proper primary beam model)
                linmosimages = ''
                linmosrms = ''
                for b in range(self.NBEAMS):
                    if mosaiccontinuummfcontinuumstatus[
                            b] and mosaiccontinuummfcopystatus[
                                b] and mosaiccontinuummfconvolstatus[b]:
                        linmosimages = linmosimages + str(b).zfill(2) + '_cv,'
                        linmosrms = linmosrms + str(
                            subs_imstats.getimagestats(
                                self,
                                str(b).zfill(2) + '_cv')[2]) + ','
                linmos = lib.miriad('linmos')
                linmos.in_ = linmosimages.rstrip(',')
                linmos.rms = linmosrms.rstrip(',')
                linmos.out = self.target.rstrip('.MS') + '_mf'
                linmos.go()
                if os.path.isdir(self.target.rstrip('.MS') + '_mf'):
                    mosaiccontinuummfstatus = True
                    subs_convim.mirtofits(
                        self.target.rstrip('.MS') + '_mf',
                        self.target.rstrip('.MS') + '_mf.fits')
                    logger.info(
                        'Mosaicking of multi-frequency image successful!')
                else:
                    mosaiccontinuummfstatus = False
                    logger.error(
                        'Multi-frequency mosaic was not created successfully!')
            else:
                mosaiccontinuummfstatus = True
                logger.info(
                    'Multi-frequency continuum mosaic was already successfully created!'
                )

        # Save the derived parameters to the parameter file

        subs_param.add_param(self, 'mosaic_continuum_mf_status',
                             mosaiccontinuummfstatus)
        subs_param.add_param(self, 'mosaic_continuum_mf_continuumstatus',
                             mosaiccontinuummfcontinuumstatus)
        subs_param.add_param(self, 'mosaic_continuum_mf_copystatus',
                             mosaiccontinuummfcopystatus)
        subs_param.add_param(self, 'mosaic_continuum_mf_convolstatus',
                             mosaiccontinuummfconvolstatus)
        subs_param.add_param(self, 'mosaic_continuum_mf_continuumbeamparams',
                             mosaiccontinuummfcontinuumbeamparams)
        subs_param.add_param(self, 'mosaic_continuum_mf_continuumimagestats',
                             mosaiccontinuummfcontinuumimagestats)
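
The get_param_def / add_param calls implement a simple checkpointing pattern: load a status with a default, do the work only if it is not done yet, then persist the status so a rerun can skip finished steps. A toy stand-in (not the real subs_param module) that shows the idea:

# Toy stand-in for the parameter-file bookkeeping used above
params = {}                                              # stands in for the on-disk parameter file
status = params.get('mosaic_continuum_mf_status', False)
if not status:
    status = True                                        # here the real code would run the mosaicking
params['mosaic_continuum_mf_status'] = status            # add_param equivalent: saved for the next run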
Example #19
    def ms2miriad(self):
        """
        Converts the data from MS to MIRIAD format via UVFITS using drivecasa. Does it for the flux calibrator,
        polarisation calibrator, and target field independently.
        """
        subs_setinit.setinitdirs(self)

        ccalbeam = 'ccal_B' + str(self.beam).zfill(2)
        cbeam = 'convert_B' + str(self.beam).zfill(2)

        # Read the parameters from crosscal
        # and check before doing anything

        # Status of the solution transfer for the target, flux calibrator and polarisation calibrator
        ccal_targetbeams_transfer = get_param_def(
            self, ccalbeam + '_targetbeams_transfer', False)
        ccal_calibration_calibrator_finished = get_param_def(
            self, ccalbeam + '_calibration_calibrator_finished', False)

        if not ccal_calibration_calibrator_finished:
            error = "Beam {}: Will not convert files to miriad format because cross-calibration failed.".format(
                str(self.beam).zfill(2))
            logger.error(error)
            raise ApercalException(error)
        elif not ccal_targetbeams_transfer:
            error = "Beam {}: Will not convert files to miriad format because cross-calibration solutions were not successfully applied to target.".format(
                str(self.beam).zfill(2))
            logger.error(error)
            raise ApercalException(error)

        # Create the parameters for the parameter file for converting from MS to UVFITS format

        # Flux calibrator MS dataset available?
        convertfluxcalmsavailable = get_param_def(
            self, cbeam + '_fluxcal_MSavailable', False)

        # Polarised calibrator MS dataset available?
        convertpolcalmsavailable = get_param_def(self,
                                                 cbeam + '_polcal_MSavailable',
                                                 False)

        # Target beam MS dataset available?
        converttargetbeamsmsavailable = get_param_def(
            self, cbeam + '_targetbeams_MSavailable', False)

        # Flux calibrator MS dataset converted to UVFITS?
        convertfluxcalms2uvfits = get_param_def(self,
                                                cbeam + '_fluxcal_MS2UVFITS',
                                                False)

        # Polarised calibrator MS dataset converted to UVFITS?
        convertpolcalms2uvfits = get_param_def(self,
                                               cbeam + '_polcal_MS2UVFITS',
                                               False)

        # Target beam MS dataset converted to UVFITS?
        converttargetbeamsms2uvfits = get_param_def(
            self, cbeam + '_targetbeams_MS2UVFITS', False)

        # Flux calibrator UVFITS dataset available?
        convertfluxcaluvfitsavailable = get_param_def(
            self, cbeam + '_fluxcal_UVFITSavailable', False)

        # Polarised calibrator UVFITS dataset available?
        convertpolcaluvfitsavailable = get_param_def(
            self, cbeam + '_polcal_UVFITSavailable', False)

        # Target beam UVFITS dataset available?
        converttargetbeamsuvfitsavailable = get_param_def(
            self, cbeam + '_targetbeams_UVFITSavailable', False)

        # Flux calibrator UVFITS dataset converted to MIRIAD?
        convertfluxcaluvfits2miriad = get_param_def(
            self, cbeam + '_fluxcal_UVFITS2MIRIAD', False)

        # Polarised calibrator UVFITS dataset converted to MIRIAD?
        convertpolcaluvfits2miriad = get_param_def(
            self, cbeam + '_polcal_UVFITS2MIRIAD', False)

        # Target beam UVFITS dataset converted to MIRIAD?
        converttargetbeamsuvfits2miriad = get_param_def(
            self, cbeam + '_targetbeams_UVFITS2MIRIAD', False)

        # Check which datasets are available in MS format #
        if self.fluxcal != '':
            convertfluxcalmsavailable = path.isdir(self.get_fluxcal_path())
        else:
            logger.warning(
                'Beam ' + self.beam +
                ': Flux calibrator dataset not specified. Cannot convert flux calibrator!'
            )
        if self.polcal != '':
            convertpolcalmsavailable = path.isdir(self.get_polcal_path())
        else:
            logger.warning(
                'Beam ' + self.beam +
                ': Polarised calibrator dataset not specified. Cannot convert polarised calibrator!'
            )
        if self.target != '':
            converttargetbeamsmsavailable = path.isdir(self.get_target_path())
        else:
            logger.warning(
                'Beam ' + self.beam +
                ': Target beam dataset not specified. Cannot convert target beams!'
            )

        # Save the derived parameters for the availability to the parameter file

        subs_param.add_param(self, cbeam + '_fluxcal_MSavailable',
                             convertfluxcalmsavailable)
        subs_param.add_param(self, cbeam + '_polcal_MSavailable',
                             convertpolcalmsavailable)
        subs_param.add_param(self, cbeam + '_targetbeams_MSavailable',
                             converttargetbeamsmsavailable)

        # Convert the flux calibrator
        if self.convert_fluxcal:
            if self.fluxcal != '':
                if not convertfluxcaluvfits2miriad:
                    if convertfluxcalmsavailable:
                        logger.debug(
                            'Beam ' + self.beam +
                            ': Converting flux calibrator dataset from MS to UVFITS format.'
                        )
                        subs_managefiles.director(
                            self,
                            'mk',
                            self.get_crosscalsubdir_path(),
                            verbose=False)
                        fluxcal_ms = self.get_fluxcal_path()

                        # convert only if corrected data column exists
                        if subs_msutils.has_correcteddata(fluxcal_ms):
                            datacolumn = "corrected"

                            fluxcal_fits = mspath_to_fitspath(
                                self.get_crosscalsubdir_path(), fluxcal_ms)

                            fc_convert = exportuvfits_cmd.format(
                                vis=self.get_fluxcal_path(),
                                fits=fluxcal_fits,
                                datacolumn=datacolumn)

                            lib.run_casa([fc_convert], timeout=3600)
                            if path.isfile(fluxcal_fits):
                                convertfluxcalms2uvfits = True
                                logger.info(
                                    'Beam ' + self.beam +
                                    ': Converted flux calibrator dataset from MS to UVFITS format!'
                                )
                            else:
                                convertfluxcalms2uvfits = False
                                logger.warning(
                                    'Beam ' + self.beam +
                                    ': Could not convert flux calibrator dataset {} '
                                    'from MS to UVFITS format!'.format(
                                        fluxcal_fits))
                        else:
                            logger.warning(
                                'Beam ' + self.beam +
                                ': Flux calibrator does not have a corrected_data column! Not '
                                'converting flux calibrator dataset!')
                    else:
                        logger.warning(
                            'Beam ' + self.beam +
                            ': Flux calibrator dataset {} not available!'.
                            format(self.get_fluxcal_path()))
                else:
                    logger.info(
                        'Beam ' + self.beam +
                        ': Flux calibrator dataset was already converted from MS to UVFITS format'
                    )
            else:
                logger.warning(
                    'Beam ' + self.beam +
                    ': Flux calibrator dataset not specified. Cannot convert flux calibrator!'
                )
        else:
            logger.warning('Beam ' + self.beam +
                           ': Not converting flux calibrator dataset!')

        # Convert the polarised calibrator
        if self.convert_polcal:
            if self.polcal != '':
                if not convertpolcaluvfits2miriad:
                    if convertpolcalmsavailable:
                        logger.debug(
                            'Beam ' + self.beam +
                            ': Converting polarised calibrator dataset from MS to UVFITS format.'
                        )
                        subs_managefiles.director(
                            self,
                            'mk',
                            self.get_crosscalsubdir_path(),
                            verbose=False)
                        polcal_ms = self.get_polcal_path()

                        # convert only if corrected data column exists
                        if subs_msutils.has_correcteddata(polcal_ms):
                            datacolumn = "corrected"

                            polcal_fits = mspath_to_fitspath(
                                self.get_crosscalsubdir_path(), polcal_ms)

                            pc_convert = exportuvfits_cmd.format(
                                vis=polcal_ms,
                                fits=polcal_fits,
                                datacolumn=datacolumn)

                            lib.run_casa([pc_convert], timeout=3600)
                            if path.isfile(polcal_fits):
                                convertpolcalms2uvfits = True
                                logger.info(
                                    'Beam ' + self.beam +
                                    ': Converted polarised calibrator dataset from MS to UVFITS format!'
                                )
                            else:
                                convertpolcalms2uvfits = False
                                logger.warning(
                                    'Beam ' + self.beam +
                                    ': Could not convert polarised calibrator dataset from MS to UVFITS format!'
                                )
                        else:
                            logger.warning(
                                'Beam ' + self.beam +
                                ': Polarised calibrator does not have a corrected_data column! Not '
                                'converting polarised calibrator dataset!')

                    else:
                        logger.warning(
                            'Beam ' + self.beam +
                            ': Polarised calibrator dataset not available!')
                else:
                    logger.info(
                        'Beam ' + self.beam +
                        ': Polarised calibrator dataset was already converted from MS to UVFITS format'
                    )
            else:
                logger.warning(
                    'Beam ' + self.beam +
                    ': Polarised calibrator dataset not specified. Cannot convert polarised calibrator!'
                )
        else:
            logger.warning('Beam ' + self.beam +
                           ': Not converting polarised calibrator dataset!')

        # Convert the target beams
        if self.convert_target:
            if self.target != '':
                logger.info(
                    'Beam ' + self.beam +
                    ': Converting target beam dataset from MS to UVFITS format.'
                )
                if not converttargetbeamsuvfits2miriad:
                    if converttargetbeamsmsavailable:
                        subs_managefiles.director(
                            self,
                            'mk',
                            self.get_crosscalsubdir_path(),
                            verbose=False)

                        target_ms = self.get_target_path()
                        target_fits = mspath_to_fitspath(
                            self.get_crosscalsubdir_path(), target_ms)

                        # only convert if corrected data column exists
                        if subs_msutils.has_correcteddata(target_ms):
                            datacolumn = "corrected"

                            tg_convert = exportuvfits_cmd.format(
                                vis=target_ms,
                                fits=target_fits,
                                datacolumn=datacolumn)

                            lib.run_casa([tg_convert], timeout=10000)
                            if path.isfile(target_fits):
                                converttargetbeamsms2uvfits = True
                                logger.debug(
                                    'Beam ' + self.beam +
                                    ': Converted dataset of target beam from MS to UVFITS format!'
                                )
                            else:
                                converttargetbeamsms2uvfits = False
                                logger.warning(
                                    'Beam ' + self.beam +
                                    ': Could not convert dataset for target beam from MS to UVFITS format!'
                                )
                        else:
                            logger.warning(
                                'Beam ' + self.beam +
                                ': Target beam dataset does not have a corrected_data column! Not '
                                'converting target beam dataset!')

                    else:
                        logger.warning('Beam ' + self.beam +
                                       ': Target beam dataset not available!')
                else:
                    logger.info('Beam ' + self.beam +
                                ': Target beam dataset was already '
                                'converted from MS to UVFITS format')
            else:
                logger.warning(
                    'Beam ' + self.beam +
                    ': Target beam dataset not specified. Cannot convert target beam dataset!'
                )
        else:
            logger.warning('Beam ' + self.beam +
                           ': Not converting target beam dataset!')

        # Save the derived parameters for the MS to UVFITS conversion to the parameter file

        subs_param.add_param(self, cbeam + '_fluxcal_MS2UVFITS',
                             convertfluxcalms2uvfits)
        subs_param.add_param(self, cbeam + '_polcal_MS2UVFITS',
                             convertpolcalms2uvfits)
        subs_param.add_param(self, cbeam + '_targetbeams_MS2UVFITS',
                             converttargetbeamsms2uvfits)

        # Check which datasets are available in UVFITS format #
        if self.fluxcal != '':
            crosscal_fluxcal = mspath_to_fitspath(
                self.get_crosscalsubdir_path(), self.fluxcal)
            convertfluxcaluvfitsavailable = path.isfile(crosscal_fluxcal)
        else:
            logger.warning(
                'Beam ' + self.beam +
                ': Flux calibrator dataset not specified. Cannot convert flux calibrator!'
            )
        if self.polcal != '':
            crosscal_polcal = mspath_to_fitspath(
                self.get_crosscalsubdir_path(), self.polcal)
            convertpolcaluvfitsavailable = path.isfile(crosscal_polcal)
        else:
            logger.warning(
                'Beam ' + self.beam +
                ': Polarised calibrator dataset not specified. Cannot convert polarised calibrator!'
            )
        if self.target != '':
            crosscal_target = mspath_to_fitspath(
                self.get_crosscalsubdir_path(), self.target)
            converttargetbeamsuvfitsavailable = path.isfile(crosscal_target)
        else:
            logger.warning(
                'Beam ' + self.beam +
                ': Target beam dataset not specified. Cannot convert target beam!'
            )

        # Save the derived parameters for the availability to the parameter file

        subs_param.add_param(self, cbeam + '_fluxcal_UVFITSavailable',
                             convertfluxcaluvfitsavailable)
        subs_param.add_param(self, cbeam + '_polcal_UVFITSavailable',
                             convertpolcaluvfitsavailable)
        subs_param.add_param(self, cbeam + '_targetbeams_UVFITSavailable',
                             converttargetbeamsuvfitsavailable)

        # Convert the available UVFITS-datasets to MIRIAD format #

        # Convert the flux calibrator
        if self.convert_fluxcal:
            if self.fluxcal != '':
                if not convertfluxcaluvfits2miriad:
                    if convertfluxcaluvfitsavailable:
                        logger.debug(
                            'Beam ' + self.beam +
                            ': Converting flux calibrator dataset from UVFITS to MIRIAD format.'
                        )
                        subs_managefiles.director(
                            self,
                            'ch',
                            self.get_crosscalsubdir_path(),
                            verbose=False)
                        fits = lib.miriad('fits')
                        fits.op = 'uvin'
                        fits.in_ = mspath_to_fitspath(
                            self.get_crosscalsubdir_path(), self.fluxcal)
                        fits.out = mspath_to_fitspath(
                            self.get_crosscalsubdir_path(),
                            self.fluxcal,
                            ext='mir')
                        fits.go()
                        if path.isdir(fits.out):
                            convertfluxcaluvfits2miriad = True
                            logger.info(
                                'Beam ' + self.beam +
                                ': Converted flux calibrator dataset from UVFITS to MIRIAD format!'
                            )
                        else:
                            convertfluxcaluvfits2miriad = False
                            logger.warning(
                                'Beam ' + self.beam +
                                ': Could not convert flux calibrator dataset {} from UVFITS to '
                                'MIRIAD format!'.format(fits.out))
                    else:
                        logger.warning(
                            'Beam ' + self.beam +
                            ': Flux calibrator dataset not available!')
                else:
                    logger.info(
                        'Beam ' + self.beam +
                        ': Flux calibrator dataset was already converted from UVFITS to MIRIAD format'
                    )
            else:
                logger.warning(
                    'Beam ' + self.beam +
                    ': Flux calibrator dataset not specified. Cannot convert flux calibrator!'
                )
        else:
            logger.warning('Beam ' + self.beam +
                           ': Not converting flux calibrator dataset!')
        # Convert the polarised calibrator
        if self.convert_polcal:
            if self.polcal != '':
                if not convertpolcaluvfits2miriad:
                    if convertpolcaluvfitsavailable:
                        logger.debug(
                            'Beam ' + self.beam +
                            ': Converting polarised calibrator dataset from UVFITS to MIRIAD format.'
                        )
                        subs_managefiles.director(
                            self,
                            'ch',
                            self.get_crosscalsubdir_path(),
                            verbose=False)
                        fits = lib.miriad('fits')
                        fits.op = 'uvin'
                        fits.in_ = mspath_to_fitspath(
                            self.get_crosscalsubdir_path(), self.polcal)
                        fits.out = mspath_to_fitspath(
                            self.get_crosscalsubdir_path(),
                            self.polcal,
                            ext='mir')
                        fits.go()
                        if path.isdir(fits.out):
                            convertpolcaluvfits2miriad = True
                            logger.info(
                                'Beam ' + self.beam +
                                ': Converted polarised calibrator dataset from UVFITS to MIRIAD format!'
                            )
                        else:
                            convertpolcaluvfits2miriad = False
                            logger.warning(
                                'Beam ' + self.beam +
                                ': Could not convert polarised calibrator dataset from UVFITS to MIRIAD format!'
                            )
                    else:
                        logger.warning(
                            'Beam ' + self.beam +
                            ': Polarised calibrator dataset not available!')
                else:
                    logger.info(
                        'Beam ' + self.beam +
                        ': Polarised calibrator dataset was already converted from UVFITS to MIRIAD format'
                    )
            else:
                logger.warning(
                    'Beam ' + self.beam +
                    ': Polarised calibrator dataset not specified. Cannot convert polarised calibrator!'
                )
        else:
            logger.warning('Beam ' + self.beam +
                           ': Not converting polarised calibrator dataset!')
        # Convert the target beams
        if self.convert_target:
            if self.target != '':
                logger.info(
                    'Beam ' + self.beam +
                    ': Converting target beam dataset from UVFITS to MIRIAD format.'
                )
                if not converttargetbeamsuvfits2miriad:
                    if converttargetbeamsuvfitsavailable:
                        subs_managefiles.director(
                            self,
                            'ch',
                            self.get_crosscalsubdir_path(),
                            verbose=False)
                        fits = lib.miriad('fits')
                        fits.op = 'uvin'
                        fits.in_ = mspath_to_fitspath(
                            self.get_crosscalsubdir_path(), self.target)
                        fits.out = mspath_to_fitspath(
                            self.get_crosscalsubdir_path(),
                            self.target,
                            ext='mir')
                        fits.go()
                        if path.isdir(fits.out):
                            converttargetbeamsuvfits2miriad = True
                            logger.debug(
                                'Beam ' + self.beam +
                                ': Converted target beam dataset from '
                                'UVFITS to MIRIAD format!')
                        else:
                            converttargetbeamsuvfits2miriad = False
                            logger.warning(
                                'Beam ' + self.beam +
                                ': Could not convert target beam dataset '
                                '{} from UVFITS to MIRIAD format!'.format(
                                    fits.out))
                    else:
                        logger.warning('Beam ' + self.beam +
                                       ': Target beam dataset not available!')
                else:
                    logger.info('Beam ' + self.beam +
                                ': Target beam dataset was already converted '
                                'from UVFITS to MIRIAD format')
            else:
                logger.warning(
                    'Beam ' + self.beam +
                    ': Target beam dataset not specified. Cannot convert target beam dataset!'
                )
        else:
            logger.warning('Beam ' + self.beam +
                           ': Not converting target beam dataset!')

        # Save the derived parameters for the UVFITS to MIRIAD conversion to the parameter file

        subs_param.add_param(self, cbeam + '_fluxcal_UVFITS2MIRIAD',
                             convertfluxcaluvfits2miriad)
        subs_param.add_param(self, cbeam + '_polcal_UVFITS2MIRIAD',
                             convertpolcaluvfits2miriad)
        subs_param.add_param(self, cbeam + '_targetbeams_UVFITS2MIRIAD',
                             converttargetbeamsuvfits2miriad)

        if self.convert_averagems and self.subdirification:
            logger.info('Beam ' + self.beam +
                        ': Averaging down target measurement set')
            average_cmd = 'mstransform(vis="{vis}", outputvis="{outputvis}", chanaverage=True, chanbin=64)'
            vis = self.get_target_path()
            outputvis = vis.replace(".MS", "_avg.MS")
            lib.run_casa([average_cmd.format(vis=vis, outputvis=outputvis)],
                         timeout=10000)

        # Remove measurement sets if wanted
        if self.convert_removems and self.subdirification:
            logger.info('Beam ' + self.beam + ': Removing measurement sets')
            vis = self.get_target_path()
            if path.exists(vis):
                subs_managefiles.director(self, 'rm', vis)

        # Remove the UVFITS files if wanted
        if self.convert_removeuvfits and self.subdirification:
            logger.info('Beam ' + self.beam + ': Removing all UVFITS files')
            if self.fluxcal != '' and path.exists(
                    mspath_to_fitspath(
                        self.get_crosscalsubdir_path(),
                        self.fluxcal)) and convertfluxcalms2uvfits:
                subs_managefiles.director(
                    self, 'rm',
                    mspath_to_fitspath(self.get_crosscalsubdir_path(),
                                       self.fluxcal))
                logger.info('Beam ' + self.beam +
                            ': Removed fluxcal UVFITS files')
            else:
                logger.warning(
                    'Beam ' + self.beam +
                    ': No fluxcal UVFITS file available for removing')
            if self.polcal != '' and path.exists(
                    mspath_to_fitspath(
                        self.get_crosscalsubdir_path(),
                        self.polcal)) and convertpolcalms2uvfits:
                subs_managefiles.director(
                    self, 'rm',
                    mspath_to_fitspath(self.get_crosscalsubdir_path(),
                                       self.polcal))
                logger.info('Beam ' + self.beam +
                            ': Removed polcal UVFITS files')
            else:
                logger.warning(
                    'Beam ' + self.beam +
                    ': No polcal UVFITS file available for removing')
            if self.target != '' and path.exists(
                    mspath_to_fitspath(
                        self.get_crosscalsubdir_path(),
                        self.target)) and converttargetbeamsms2uvfits:
                subs_managefiles.director(
                    self, 'rm',
                    mspath_to_fitspath(self.get_crosscalsubdir_path(),
                                       self.target))
                logger.info('Beam ' + self.beam +
                            ': Removed target UVFITS files')
            else:
                logger.warning(
                    'Beam ' + self.beam +
                    ': No target UVFITS file available for removing')
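
The method above relies on a module-level helper mspath_to_fitspath and a CASA command template exportuvfits_cmd that are defined outside this excerpt. Below is a minimal sketch of what they are assumed to look like, based on the <name>.UVFITS / <name>.mir naming convention in the full-pipeline ms2miriad further down; the helper body and the exact exportuvfits arguments are assumptions rather than the verified apercal implementation.

import os


def mspath_to_fitspath(prefix, msname, ext='UVFITS'):
    # Assumed behaviour: place the dataset in the crosscal directory and
    # swap a literal trailing '.MS' for the requested extension.
    base = os.path.basename(msname)
    if base.endswith('.MS'):
        base = base[:-len('.MS')]
    return os.path.join(prefix, base + '.' + ext)


# Assumed CASA call template, mirroring raw_convert_cmd in the next example.
exportuvfits_cmd = ('exportuvfits(vis="{vis}", fitsfile="{fits}", '
                    'datacolumn="{datacolumn}", combinespw=True, '
                    'padwithflags=True, multisource=True, writestation=True)')
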
Beispiel #20
0
    def parametric(self):
        """
        Parametric self calibration using an NVSS/FIRST skymodel and calculating spectral indices by source matching
        with WENSS.
        """
        if self.selfcal_parametric:
            subs_setinit.setinitdirs(self)
            subs_setinit.setdatasetnamestomiriad(self)
            logger.info(' Doing parametric self calibration')
            subs_managefiles.director(self, 'ch', self.selfcaldir)
            for chunk in self.list_chunks():
                logger.info('Starting parametric self calibration routine on chunk ' + chunk + ' #')
                subs_managefiles.director(self, 'ch', self.selfcaldir + '/' + chunk)
                subs_managefiles.director(self, 'mk', self.selfcaldir + '/' + chunk + '/' + 'pm')
                parametric_textfile = lsm.lsm_model(chunk + '.mir', self.selfcal_parametric_skymodel_radius,
                                                    self.selfcal_parametric_skymodel_cutoff,
                                                    self.selfcal_parametric_skymodel_distance)
                lsm.write_model(self.selfcaldir + '/' + chunk + '/' + 'pm' + '/model.txt', parametric_textfile)
                logger.info('Creating model from textfile model.txt for chunk ' + chunk + ' #')
                uv = aipy.miriad.UV(self.selfcaldir + '/' + chunk + '/' + chunk + '.mir')
                freq = uv['sfreq']
                uvmodel = lib.miriad('uvmodel')
                uvmodel.vis = chunk + '.mir'
                parametric_modelfile = open(self.selfcaldir + '/' + str(chunk) + '/' + 'pm' + '/model.txt', 'r')
                for n, source in enumerate(parametric_modelfile.readlines()):
                    if n == 0:
                        uvmodel.options = 'replace,mfs'
                    else:
                        uvmodel.options = 'add,mfs'
                    uvmodel.offset = source.split(',')[0] + ',' + source.split(',')[1]
                    uvmodel.flux = source.split(',')[2] + ',i,' + str(freq) + ',' + source.split(',')[4].rstrip(
                        '\n') + ',0,0'
                    uvmodel.out = 'pm/tmp' + str(n)
                    uvmodel.go()
                    uvmodel.vis = uvmodel.out
                subs_managefiles.director(self, 'rn', 'pm/model', uvmodel.out)  # Rename the last modelfile to model
                subs_managefiles.director(self, 'rm', 'pm/tmp*')  # Remove all the obsolete modelfiles

                logger.info('Doing parametric self-calibration on chunk {} with solution interval {} min '
                            'and uvrange limits of {}~{} klambda #'.format(chunk, self.selfcal_parametric_solint,
                                                                           self.selfcal_parametric_uvmin,
                                                                           self.selfcal_parametric_uvmax))

                selfcal = lib.miriad('selfcal')
                selfcal.vis = chunk + '.mir'
                selfcal.model = 'pm/model'
                selfcal.interval = self.selfcal_parametric_solint
                selfcal.select = "'" + 'uvrange(' + str(self.selfcal_parametric_uvmin) + ',' + str(
                    self.selfcal_parametric_uvmax) + ')' + "'"
                # Choose reference antenna if given
                if self.selfcal_refant == '':
                    pass
                else:
                    selfcal.refant = self.selfcal_refant
                # Do amplitude calibration if wanted
                if self.selfcal_parametric_amp:
                    selfcal.options = 'mfs,amp'
                else:
                    selfcal.options = 'mfs'
                selfcal.go()
                logger.info('Parametric self calibration routine on chunk ' + chunk + ' done! #')
            logger.info(' Parametric self calibration done')
        else:
            logger.info(' Parametric self calibration disabled')
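
To make the uvmodel loop above easier to follow, here is a small, hypothetical illustration of how one line of pm/model.txt is turned into the offset and flux strings. The column layout (RA offset, Dec offset, flux, source name, spectral index) is inferred from the indices used in the loop and is an assumption about the output of lsm.write_model.

# Hypothetical model.txt line: dRA, dDec, flux, name, spectral index
line = '120.0,-45.0,0.85,J0101+0101,-0.7\n'
fields = line.rstrip('\n').split(',')

freq = 1.370  # placeholder for uv['sfreq'] read from the MIRIAD dataset
offset = fields[0] + ',' + fields[1]                             # '120.0,-45.0'
flux = fields[2] + ',i,' + str(freq) + ',' + fields[4] + ',0,0'  # '0.85,i,1.37,-0.7,0,0'

print(offset)
print(flux)
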
Beispiel #21
0
    def ms2miriad(self):
        """
        Converts the data from MS to MIRIAD format via UVFITS using drivecasa. Does it for the flux calibrator,
        polarisation calibrator, and target field independently.
        """
        subs_setinit.setinitdirs(self)
        nbeams = 37

        # Create the parameters for the parameter file for converting from MS to UVFITS format

        # Flux calibrator MS dataset available?
        convertfluxcalmsavailable = get_param_def(self, 'convert_fluxcal_MSavailable', False)

        # Polarised calibrator MS dataset available?
        convertpolcalmsavailable = get_param_def(self, 'convert_polcal_MSavailable', False)

        # Target beam MS dataset available?
        converttargetbeamsmsavailable = get_param_def(self, 'convert_targetbeams_MSavailable', np.full(nbeams, False))

        # Flux calibrator MS dataset converted to UVFITS?
        convertfluxcalms2uvfits = get_param_def(self, 'convert_fluxcal_MS2UVFITS', False)

        # Polarised calibrator MS dataset converted to UVFITS?
        convertpolcalms2uvfits = get_param_def(self, 'convert_polcal_MS2UVFITS', False)

        # Target beam MS dataset converted to UVFITS?
        converttargetbeamsms2uvfits = get_param_def(self, 'convert_targetbeams_MS2UVFITS', np.full(nbeams, False))

        # Flux calibrator UVFITS dataset available?
        convertfluxcaluvfitsavailable = get_param_def(self, 'convert_fluxcal_UVFITSavailable', False)

        # Polarised calibrator UVFITS dataset available?
        convertpolcaluvfitsavailable = get_param_def(self, 'convert_polcal_UVFITSavailable', False)

        # Target beam UVFITS dataset available?
        converttargetbeamsuvfitsavailable = get_param_def(self, 'convert_targetbeams_UVFITSavailable',
                                                          np.full(nbeams, False))

        # Flux calibrator UVFITS dataset converted to MIRIAD?
        convertfluxcaluvfits2miriad = get_param_def(self, 'convert_fluxcal_UVFITS2MIRIAD', False)

        # Polarised calibrator UVFITS dataset converted to MIRIAD?
        convertpolcaluvfits2miriad = get_param_def(self, 'convert_polcal_UVFITS2MIRIAD', False)

        # Target beam UVFITS dataset converted to MIRIAD?
        converttargetbeamsuvfits2miriad = get_param_def(self, 'convert_targetbeams_UVFITS2MIRIAD',
                                                        np.full(nbeams, False))

        # Check which datasets are available in MS format #
        if self.fluxcal != '':
            convertfluxcalmsavailable = os.path.isdir(self.basedir + '00' + '/' + self.rawsubdir + '/' + self.fluxcal)
        else:
            logger.warning('Flux calibrator dataset not specified. Cannot convert flux calibrator!')
        if self.polcal != '':
            convertpolcalmsavailable = os.path.isdir(self.basedir + '00' + '/' + self.rawsubdir + '/' + self.polcal)
        else:
            logger.warning('Polarised calibrator dataset not specified. Cannot convert polarised calibrator!')
        if self.target != '':
            for b in range(nbeams):
                converttargetbeamsmsavailable[b] = os.path.isdir(
                    self.basedir + str(b).zfill(2) + '/' + self.rawsubdir + '/' + self.target)
        else:
            logger.warning('Target beam dataset not specified. Cannot convert target beams!')

        # Save the derived parameters for the availability to the parameter file

        subs_param.add_param(self, 'convert_fluxcal_MSavailable', convertfluxcalmsavailable)
        subs_param.add_param(self, 'convert_polcal_MSavailable', convertpolcalmsavailable)
        subs_param.add_param(self, 'convert_targetbeams_MSavailable', converttargetbeamsmsavailable)

        # Convert the available MS-datasets to UVFITS #
        raw_convert_cmd = 'exportuvfits(vis="{basedir}00/{rawsubdir}/{cal}", ' \
                          'fitsfile="{basedir}00/{crosscalsubdir}/{calbase}UVFITS", datacolumn="{datacolumn}", ' \
                          'combinespw=True, padwithflags=True, multisource=True, writestation=True)'

        # Convert the flux calibrator
        if self.convert_fluxcal:
            if self.fluxcal != '':
                if not convertfluxcaluvfits2miriad:
                    if convertfluxcalmsavailable:
                        logger.debug('Converting flux calibrator dataset from MS to UVFITS format.')
                        subs_managefiles.director(self, 'mk', self.basedir + '00' + '/' + self.crosscalsubdir,
                                                  verbose=False)
                        path = self.basedir + '00' + '/' + self.rawsubdir + '/' + self.fluxcal
                        if subs_msutils.has_correcteddata(path):
                            fc_convert = raw_convert_cmd.format(basedir=self.basedir, rawsubdir=self.rawsubdir,
                                                                cal=self.fluxcal, calbase=self.fluxcal[:-2],
                                                                crosscalsubdir=self.crosscalsubdir,
                                                                datacolumn="corrected")
                        else:
                            fc_convert = raw_convert_cmd.format(basedir=self.basedir, rawsubdir=self.rawsubdir,
                                                                cal=self.fluxcal, calbase=self.fluxcal[:-2],
                                                                crosscalsubdir=self.crosscalsubdir,
                                                                datacolumn="data")
                            logger.warning('Flux calibrator does not have a corrected_data column! Using uncorrected '
                                           'data for conversion!')

                        lib.run_casa([fc_convert], timeout=3600)
                        if os.path.isfile(self.basedir + '00' + '/' + self.crosscalsubdir + '/' + self.fluxcal.rstrip(
                                'MS') + 'UVFITS'):
                            convertfluxcalms2uvfits = True
                            logger.info('Converted flux calibrator dataset from MS to UVFITS format!')
                        else:
                            convertfluxcalms2uvfits = False
                            logger.warning('Could not convert flux calibrator dataset from MS to UVFITS format!')
                    else:
                        logger.warning('Flux calibrator dataset not available!')
                else:
                    logger.info('Flux calibrator dataset was already converted from MS to UVFITS format')
            else:
                logger.warning('Flux calibrator dataset not specified. Cannot convert flux calibrator!')
        else:
            logger.warning('Not converting flux calibrator dataset!')

        # Convert the polarised calibrator
        if self.convert_polcal:
            if self.polcal != '':
                if not convertpolcaluvfits2miriad:
                    if convertpolcalmsavailable:
                        logger.debug('Converting polarised calibrator dataset from MS to UVFITS format.')
                        subs_managefiles.director(self, 'mk', self.basedir + '00' + '/' + self.crosscalsubdir,
                                                  verbose=False)
                        path = self.basedir + '00' + '/' + self.rawsubdir + '/' + self.polcal
                        if subs_msutils.has_correcteddata(path):
                            pc_convert = raw_convert_cmd.format(basedir=self.basedir, rawsubdir=self.rawsubdir,
                                                                cal=self.polcal, calbase=self.polcal[:-2],
                                                                crosscalsubdir=self.crosscalsubdir,
                                                                datacolumn="corrected")
                        else:
                            pc_convert = raw_convert_cmd.format(basedir=self.basedir, rawsubdir=self.rawsubdir,
                                                                cal=self.polcal, calbase=self.polcal[:-2],
                                                                crosscalsubdir=self.crosscalsubdir,
                                                                datacolumn="data")
                            logger.warning('Polarised calibrator does not have a corrected_data column! Using '
                                           'uncorrected data for conversion!')

                        lib.run_casa([pc_convert], timeout=3600)
                        if os.path.isfile(self.basedir + '00' + '/' + self.crosscalsubdir + '/' + self.polcal.rstrip(
                                'MS') + 'UVFITS'):
                            convertpolcalms2uvfits = True
                            logger.info('Converted polarised calibrator dataset from MS to UVFITS format!')
                        else:
                            convertpolcalms2uvfits = False
                            logger.warning('Could not convert polarised calibrator dataset from MS to UVFITS format!')
                    else:
                        logger.warning('Polarised calibrator dataset not available!')
                else:
                    logger.info('Polarised calibrator dataset was already converted from MS to UVFITS format')
            else:
                logger.warning('Polarised calibrator dataset not specified. Cannot convert polarised calibrator!')
        else:
            logger.warning('Not converting polarised calibrator dataset!')

        # Convert the target beams
        if self.convert_target:
            if self.target != '':
                logger.info('Converting target beam datasets from MS to UVFITS format.')
                if self.convert_targetbeams == 'all':
                    datasets = glob.glob(self.basedir + '[0-9][0-9]' + '/' + self.rawsubdir + '/' + self.target)
                    logger.debug('Converting all available target beam datasets')
                else:
                    beams = self.convert_targetbeams.split(",")
                    datasets = [self.basedir + str(b).zfill(2) + '/' + self.rawsubdir + '/' + self.target for b in
                                beams]
                    logger.debug('Converting all selected target beam datasets')
                for vis in datasets:
                    if not converttargetbeamsuvfits2miriad[int(vis.split('/')[-3])]:
                        if converttargetbeamsmsavailable[int(vis.split('/')[-3])]:
                            subs_managefiles.director(self, 'mk',
                                                      self.basedir + vis.split('/')[-3] + '/' + self.crosscalsubdir,
                                                      verbose=False)

                            beam_dataset = vis.split('/')[-3]
                            raw_tg_cmd = 'exportuvfits(vis="{basedir}{beam_dataset}/{rawsubdir}/{target}", ' \
                                         'fitsfile="{basedir}{beam_dataset}/{crosscalsubdir}/{targetbase}UVFITS", ' \
                                         'datacolumn="{datacolumn}", combinespw=True, padwithflags=True, ' \
                                         'multisource=True, writestation=True)'

                            path = self.basedir + beam_dataset + '/' + self.rawsubdir + '/' + self.target
                            if subs_msutils.has_correcteddata(path):
                                datacolumn = "corrected"
                            else:
                                datacolumn = "data"
                                logger.warning('Target beam dataset {} does not have a corrected_data column! Using '
                                               'uncorrected data for conversion!'.format(beam_dataset))

                            tg_convert = raw_tg_cmd.format(basedir=self.basedir, rawsubdir=self.rawsubdir,
                                                           target=self.target, crosscalsubdir=self.crosscalsubdir,
                                                           targetbase=self.target.rstrip('MS'), datacolumn=datacolumn,
                                                           beam_dataset=beam_dataset)

                            lib.run_casa([tg_convert], timeout=7200)
                            if os.path.isfile(self.basedir + vis.split('/')[-3] + '/' + self.crosscalsubdir + '/' +
                                              self.target.rstrip('MS') + 'UVFITS'):
                                converttargetbeamsms2uvfits[int(vis.split('/')[-3])] = True
                                logger.debug('Converted dataset of target beam ' + vis.split('/')[
                                    -3] + ' from MS to UVFITS format!')
                            else:
                                converttargetbeamsms2uvfits[int(vis.split('/')[-3])] = False
                                logger.warning('Could not convert dataset for target beam ' + vis.split('/')[
                                    -3] + ' from MS to UVFITS format!')
                        else:
                            logger.warning('Dataset for target beam ' + vis.split('/')[-3] + ' not available!')
                    else:
                        logger.info('Dataset for target beam ' + vis.split('/')[
                            -3] + ' was already converted from MS to UVFITS format')
            else:
                logger.warning('Target beam dataset(s) not specified. Cannot convert target beam datasets!')
        else:
            logger.warning('Not converting target beam dataset(s)!')

        # Save the derived parameters for the MS to UVFITS conversion to the parameter file

        subs_param.add_param(self, 'convert_fluxcal_MS2UVFITS', convertfluxcalms2uvfits)
        subs_param.add_param(self, 'convert_polcal_MS2UVFITS', convertpolcalms2uvfits)
        subs_param.add_param(self, 'convert_targetbeams_MS2UVFITS', converttargetbeamsms2uvfits)

        # Check which datasets are available in UVFITS format #
        if self.fluxcal != '':
            convertfluxcaluvfitsavailable = os.path.isfile(
                self.basedir + '00' + '/' + self.crosscalsubdir + '/' + self.fluxcal.rstrip('MS') + 'UVFITS')
        else:
            logger.warning('Flux calibrator dataset not specified. Cannot convert flux calibrator!')
        if self.polcal != '':
            convertpolcaluvfitsavailable = os.path.isfile(
                self.basedir + '00' + '/' + self.crosscalsubdir + '/' + self.polcal.rstrip('MS') + 'UVFITS')
        else:
            logger.warning('Polarised calibrator dataset not specified. Cannot convert polarised calibrator!')
        if self.target != '':
            for b in range(nbeams):
                converttargetbeamsuvfitsavailable[b] = os.path.isfile(
                    self.basedir + str(b).zfill(2) + '/' + self.crosscalsubdir + '/' + self.target.rstrip(
                        'MS') + 'UVFITS')
        else:
            logger.warning('Target beam dataset not specified. Cannot convert target beams!')

        # Save the derived parameters for the availability to the parameter file

        subs_param.add_param(self, 'convert_fluxcal_UVFITSavailable', convertfluxcaluvfitsavailable)
        subs_param.add_param(self, 'convert_polcal_UVFITSavailable', convertpolcaluvfitsavailable)
        subs_param.add_param(self, 'convert_targetbeams_UVFITSavailable', converttargetbeamsuvfitsavailable)

        # Convert the available UVFITS-datasets to MIRIAD format #

        # Convert the flux calibrator
        if self.convert_fluxcal:
            if self.fluxcal != '':
                if not convertfluxcaluvfits2miriad:
                    if convertfluxcaluvfitsavailable:
                        logger.debug('Converting flux calibrator dataset from UVFITS to MIRIAD format.')
                        subs_managefiles.director(self, 'ch', self.basedir + '00' + '/' + self.crosscalsubdir,
                                                  verbose=False)
                        fits = lib.miriad('fits')
                        fits.op = 'uvin'
                        fits.in_ = self.basedir + '00' + '/' + self.crosscalsubdir + '/' + self.fluxcal.rstrip(
                            'MS') + 'UVFITS'
                        fits.out = self.basedir + '00' + '/' + self.crosscalsubdir + '/' + self.fluxcal.rstrip(
                            'MS') + 'mir'
                        fits.go()
                        if os.path.isdir(self.basedir + '00' + '/' + self.crosscalsubdir + '/' + self.fluxcal.rstrip(
                                'MS') + 'mir'):
                            convertfluxcaluvfits2miriad = True
                            logger.info('Converted flux calibrator dataset from UVFITS to MIRIAD format!')
                        else:
                            convertfluxcaluvfits2miriad = False
                            logger.warning('Could not convert flux calibrator dataset from UVFITS to MIRIAD format!')
                    else:
                        logger.warning('Flux calibrator dataset not available!')
                else:
                    logger.info('Flux calibrator dataset was already converted from UVFITS to MIRIAD format')
            else:
                logger.warning('Flux calibrator dataset not specified. Cannot convert flux calibrator!')
        else:
            logger.warning('Not converting flux calibrator dataset!')
        # Convert the polarised calibrator
        if self.convert_polcal:
            if self.polcal != '':
                if not convertpolcaluvfits2miriad:
                    if convertpolcaluvfitsavailable:
                        logger.debug('Converting polarised calibrator dataset from UVFITS to MIRIAD format.')
                        subs_managefiles.director(self, 'ch', self.basedir + '00' + '/' + self.crosscalsubdir,
                                                  verbose=False)
                        fits = lib.miriad('fits')
                        fits.op = 'uvin'
                        fits.in_ = self.basedir + '00' + '/' + self.crosscalsubdir + '/' + self.polcal.rstrip(
                            'MS') + 'UVFITS'
                        fits.out = self.basedir + '00' + '/' + self.crosscalsubdir + '/' + self.polcal.rstrip(
                            'MS') + 'mir'
                        fits.go()
                        if os.path.isdir(self.basedir + '00' + '/' + self.crosscalsubdir + '/' + self.polcal.rstrip(
                                'MS') + 'mir'):
                            convertpolcaluvfits2miriad = True
                            logger.info('Converted polarised calibrator dataset from UVFITS to MIRIAD format!')
                        else:
                            convertpolcaluvfits2miriad = False
                            logger.warning(
                                'Could not convert polarised calibrator dataset from UVFITS to MIRIAD format!')
                    else:
                        logger.warning('Polarised calibrator dataset not available!')
                else:
                    logger.info('Polarised calibrator dataset was already converted from UVFITS to MIRIAD format')
            else:
                logger.warning('Polarised calibrator dataset not specified. Cannot convert polarised calibrator!')
        else:
            logger.warning('Not converting polarised calibrator dataset!')
        # Convert the target beams
        if self.convert_target:
            if self.target != '':
                logger.info('Converting target beam datasets from UVFITS to MIRIAD format.')
                if self.convert_targetbeams == 'all':
                    datasets = glob.glob(
                        self.basedir + '[0-9][0-9]' + '/' + self.crosscalsubdir + '/' + self.target.rstrip(
                            'MS') + 'UVFITS')
                    logger.debug('Converting all available target beam datasets')
                else:
                    beams = self.convert_targetbeams.split(",")
                    datasets = [self.basedir + str(b).zfill(2) + '/' + self.crosscalsubdir + '/' + self.target.rstrip(
                        'MS') + 'UVFITS' for b in beams]
                    logger.debug('Converting all selected target beam datasets')
                for vis in datasets:
                    if not converttargetbeamsuvfits2miriad[int(vis.split('/')[-3])]:
                        if converttargetbeamsuvfitsavailable[int(vis.split('/')[-3])]:
                            subs_managefiles.director(self, 'ch',
                                                      self.basedir + vis.split('/')[-3] + '/' + self.crosscalsubdir,
                                                      verbose=False)
                            fits = lib.miriad('fits')
                            fits.op = 'uvin'
                            fits.in_ = self.basedir + vis.split('/')[
                                -3] + '/' + self.crosscalsubdir + '/' + self.target.rstrip('MS') + 'UVFITS'
                            fits.out = self.basedir + vis.split('/')[
                                -3] + '/' + self.crosscalsubdir + '/' + self.target.rstrip('MS') + 'mir'
                            fits.go()
                            if os.path.isdir(self.basedir + vis.split('/')[-3] + '/' + self.crosscalsubdir + '/' + self.target.rstrip('MS') + 'mir'):
                                converttargetbeamsuvfits2miriad[int(vis.split('/')[-3])] = True
                                logger.debug('Converted dataset of target beam ' + vis.split('/')[
                                    -3] + ' from UVFITS to MIRIAD format!')
                            else:
                                converttargetbeamsuvfits2miriad[int(vis.split('/')[-3])] = False
                                logger.warning('Could not convert dataset for target beam ' + vis.split('/')[
                                    -3] + ' from UVFITS to MIRIAD format!')
                        else:
                            logger.warning('Dataset for target beam ' + vis.split('/')[-3] + ' not available!')
                    else:
                        logger.info('Dataset for target beam ' + vis.split('/')[
                            -3] + ' was already converted from UVFITS to MIRIAD format')
            else:
                logger.warning('Target beam dataset(s) not specified. Cannot convert target beam datasets!')
        else:
            logger.warning('Not converting target beam dataset(s)!')

        # Save the derived parameters for the UVFITS to MIRIAD conversion to the parameter file

        subs_param.add_param(self, 'convert_fluxcal_UVFITS2MIRIAD', convertfluxcaluvfits2miriad)
        subs_param.add_param(self, 'convert_polcal_UVFITS2MIRIAD', convertpolcaluvfits2miriad)
        subs_param.add_param(self, 'convert_targetbeams_UVFITS2MIRIAD', converttargetbeamsuvfits2miriad)

        # Remove the UVFITS files if wanted #
        if self.convert_removeuvfits:
            logger.info('Removing all UVFITS files')
            subs_managefiles.director(self, 'rm',
                                      self.basedir + '00' + '/' + self.crosscalsubdir + '/' + self.fluxcal.rstrip(
                                          'MS') + 'UVFITS')
            subs_managefiles.director(self, 'rm',
                                      self.basedir + '00' + '/' + self.crosscalsubdir + '/' + self.polcal.rstrip(
                                          'MS') + 'UVFITS')
            for beam in range(nbeams):
                if os.path.isdir(self.basedir + str(beam).zfill(2) + '/' + self.crosscalsubdir):
                    subs_managefiles.director(self, 'rm', self.basedir + str(beam).zfill(
                        2) + '/' + self.crosscalsubdir + '/' + self.target.rstrip('MS') + 'UVFITS')
                else:
                    pass
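
The beam loops in this variant rely on a fixed directory layout, <basedir>/<NN>/<rawsubdir>/<dataset>, and recover the beam number from the dataset path via vis.split('/')[-3]. A short, self-contained illustration of that convention follows; the base directory and dataset name are hypothetical.

basedir = '/data/apertif/190409015/'   # hypothetical base directory
rawsubdir = 'raw'
target = 'WSRTA190409015_B000.MS'      # hypothetical target MS name

vis = basedir + str(7).zfill(2) + '/' + rawsubdir + '/' + target
print(vis)                       # /data/apertif/190409015/07/raw/WSRTA190409015_B000.MS
print(vis.split('/')[-3])        # '07' -> zero-padded beam string
print(int(vis.split('/')[-3]))   # 7    -> index into the per-beam status arrays
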
Beispiel #22
0
               "chan", "chan", "-", "Jy/beam", "Jy/beam", "Jy/beam", "-", "-",
               "Jy/beam", "chan", "chan", "pix", "pix", "pix", "pix", "pix",
               "deg", "deg", "pix", "pix", "pix", "Jy/beam", "-", "-", "-")
catParFormt = ("%12s", "%7i", "%10.3f", "%10.3f", "%10.3f", "%7i", "%7i",
               "%7i", "%7i", "%7i", "%7i", "%8i", "%10.7f", "%10.7f", "%12.6f",
               "%8.6f", "%7i", "%12.6f", "%10.3f", "%10.3f", "%10.3f",
               "%10.3f", "%10.3f", "%10.3f", "%10.3f", "%10.3f", "%10.3f",
               "%10.3f", "%10.3f", "%10.3f", "%12.6f", "%10i", "%7i", "%7i")
prepare = apercal.prepare()

for b in beams:
    loc = '/tank/hess/apertif/' + taskid + '/B0' + str(b).zfill(2) + '/'
    print("\t{}".format(loc))
    clean_catalog = loc + 'clean_cat.txt'

    managefiles.director(prepare, 'ch', loc)

    for c in cubes:
        line_cube = cube_name + '{0}.fits'.format(c)
        beam_cube = beam_name + '{0}.fits'.format(c)
        maskfits = cube_name + '{0}_4sig_mask.fits'.format(c)
        mask2dfits = cube_name + '{0}_4sig_mask-2d.fits'.format(c)
        filteredfits = cube_name + '{0}_filtered.fits'.format(c)
        splinefits = cube_name + '{0}_filtered_spline.fits'.format(c)
        new_splinefits = cube_name + '{0}_all_spline.fits'.format(c)
        catalog_file = cube_name + '{0}_4sig_cat.txt'.format(c)

        if os.path.isfile(maskfits):
            catalog = ascii.read(catalog_file, header_start=10)
            if args.sources == 'all':
                mask_expr = '"(<mask_sofia>.eq.-1).or.(<mask_sofia>.ge.1)"'
Beispiel #23
0
    def split_data(self):
        """
        Splits out a certain frequency range from the datasets
        """

        subs_setinit.setinitdirs(self)

        sbeam = 'split_B' + str(self.beam).zfill(2)

        splitfluxcalstatus = get_param_def(self, sbeam + '_fluxcal_status',
                                           False)
        splitpolcalstatus = get_param_def(self, sbeam + '_polcal_status',
                                          False)
        splittargetbeamsstatus = get_param_def(self,
                                               sbeam + '_targetbeams_status',
                                               False)

        logger.info('Beam ' + self.beam + ': Splitting channel ' +
                    str(self.split_startchannel) + ' until ' +
                    str(self.split_endchannel))
        # split the flux calibrator dataset
        logger.debug("self.fluxcal = {}".format(self.fluxcal))
        logger.debug("os.path.isdir(self.get_fluxcal_path()) = {}".format(
            os.path.isdir(self.get_fluxcal_path())))
        if self.fluxcal != '' and os.path.isdir(self.get_fluxcal_path()):
            fluxcal_split = 'split(vis = "' + self.get_fluxcal_path() + '", outputvis = "' + self.get_fluxcal_path().rstrip('.MS') + '_split.MS"' + \
                ', spw = "0:' + str(self.split_startchannel) + '~' + str(self.split_endchannel) + '", datacolumn = "data")'
            lib.run_casa([fluxcal_split], log_output=True, timeout=30000)
            if os.path.isdir(self.get_fluxcal_path().rstrip('.MS') +
                             '_split.MS'):
                subs_managefiles.director(self, 'rm', self.get_fluxcal_path())
                subs_managefiles.director(
                    self,
                    'rn',
                    self.get_fluxcal_path(),
                    file_=self.get_fluxcal_path().rstrip('.MS') + '_split.MS')
                splitfluxcalstatus = True
            else:
                splitfluxcalstatus = False
                logger.warning(
                    'Beam ' + self.beam +
                    ': Splitting of flux calibrator dataset not successful!')
        else:
            splitfluxcalstatus = False
            logger.warning(
                'Beam ' + self.beam +
                ': Fluxcal not set or dataset not available! Cannot split flux calibrator dataset!'
            )

        subs_param.add_param(self, sbeam + '_fluxcal_status',
                             splitfluxcalstatus)

        # Split the polarised calibrator dataset
        logger.debug("self.polcal = {}".format(self.polcal))
        logger.debug("os.path.isdir(self.get_polcal_path()) = {}".format(
            os.path.isdir(self.get_polcal_path())))
        if self.polcal != '' and os.path.isdir(self.get_polcal_path()):
            polcal_split = 'split(vis = "' + self.get_polcal_path() + '", outputvis = "' + self.get_polcal_path().rstrip('.MS') + '_split.MS"' + \
                ', spw = "0:' + str(self.split_startchannel) + '~' + str(self.split_endchannel) + '", datacolumn = "data")'
            lib.run_casa([polcal_split], log_output=True, timeout=30000)
            if os.path.isdir(self.get_polcal_path().rstrip('.MS') +
                             '_split.MS'):
                subs_managefiles.director(self, 'rm', self.get_polcal_path())
                subs_managefiles.director(
                    self,
                    'rn',
                    self.get_polcal_path(),
                    file_=self.get_polcal_path().rstrip('.MS') + '_split.MS')
                splitpolcalstatus = True
            else:
                splitpolcalstatus = False
                logger.warning(
                    'Beam ' + self.beam +
                    ': Splitting of polarised calibrator dataset not successful!'
                )
        else:
            splitpolcalstatus = False
            logger.warning(
                'Beam ' + self.beam +
                ': Polcal not set or dataset not available! Cannot split polarised calibrator dataset!'
            )

        subs_param.add_param(self, sbeam + '_polcal_status', splitpolcalstatus)

        # Split the target dataset
        logger.debug("self.target = {}".format(self.target))
        logger.debug("os.path.isdir(self.get_target_path()) = {}".format(
            os.path.isdir(self.get_target_path())))
        if self.target != '' and os.path.isdir(self.get_target_path()):
            target_split = 'split(vis = "' + self.get_target_path() + '", outputvis = "' + self.get_target_path().rstrip('.MS') + '_split.MS"' + \
                ', spw = "0:' + str(self.split_startchannel) + '~' + str(self.split_endchannel) + '", datacolumn = "data")'
            lib.run_casa([target_split], log_output=True, timeout=30000)
            if os.path.isdir(self.get_target_path().rstrip('.MS') +
                             '_split.MS'):
                subs_managefiles.director(self, 'rm', self.get_target_path())
                subs_managefiles.director(
                    self,
                    'rn',
                    self.get_target_path(),
                    file_=self.get_target_path().rstrip('.MS') + '_split.MS')
                splittargetbeamsstatus = True
            else:
                splittargetbeamsstatus = False
                logger.warning('Beam ' + self.beam +
                               ': Splitting of target dataset not successful!')
        else:
            splittargetbeamsstatus = False
            logger.warning(
                'Beam ' + self.beam +
                ': Target not set or dataset not available! Cannot split target beam dataset!'
            )

        subs_param.add_param(self, sbeam + '_targetbeams_status',
                             splittargetbeamsstatus)
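
A caveat about the rstrip('.MS') pattern used above: str.rstrip removes any run of the listed characters rather than a literal suffix, and because '.' is in the set the stripping does not stop at the extension dot, so a basename ending in 'M', 'S' or '.' loses those characters as well (the rstrip('MS') variant in the earlier examples is less affected, since the dot halts it). Below is a minimal sketch of a literal-suffix replacement; the helper name is invented for illustration.

def replace_ms_suffix(path, replacement):
    # Swap a literal trailing '.MS' instead of stripping characters from a set.
    if path.endswith('.MS'):
        return path[:-len('.MS')] + replacement
    return path + replacement


print('3C147_S.MS'.rstrip('.MS'))                    # '3C147_'  (the 'S' before the extension is eaten)
print(replace_ms_suffix('3C147_S.MS', '_split.MS'))  # '3C147_S_split.MS'
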
Beispiel #24
0
    def copyobs(self):
        """
        Prepares the directory structure and copies over the needed data from ALTA.
        Checks for data in the current working directories and copies only missing data.
        """
        subs_setinit.setinitdirs(self)

        # Check if the parameter is already in the parameter file and load it otherwise create the needed arrays #

        if not os.path.isdir(self.basedir):
            os.mkdir(self.basedir)

        # Is the fluxcal data requested?
        preparefluxcalrequested = get_param_def(self,
                                                'prepare_fluxcal_requested',
                                                False)

        # Is the polcal data requested?
        preparepolcalrequested = get_param_def(self,
                                               'prepare_polcal_requested',
                                               False)

        # Is the target data requested? One entry per beam
        preparetargetbeamsrequested = get_param_def(
            self, 'prepare_targetbeams_requested', np.full(self.NBEAMS, False))

        # Is the fluxcal data already on disk?
        preparefluxcaldiskstatus = get_param_def(self,
                                                 'prepare_fluxcal_diskstatus',
                                                 False)

        # Is the polcal data already on disk?
        preparepolcaldiskstatus = get_param_def(self,
                                                'prepare_polcal_diskstatus',
                                                False)

        # Is the target data already on disk? One entry per beam
        preparetargetbeamsdiskstatus = get_param_def(
            self, 'prepare_targetbeams_diskstatus',
            np.full(self.NBEAMS, False))

        # Is the fluxcal data on ALTA?
        preparefluxcalaltastatus = get_param_def(self,
                                                 'prepare_fluxcal_altastatus',
                                                 False)

        # Is the polcal data on ALTA?
        preparepolcalaltastatus = get_param_def(self,
                                                'prepare_polcal_altastatus',
                                                False)

        # Is the target data on ALTA? One entry per beam
        preparetargetbeamsaltastatus = get_param_def(
            self, 'prepare_targetbeams_altastatus',
            np.full(self.NBEAMS, False))

        # Is the fluxcal data copied?
        preparefluxcalcopystatus = get_param_def(self,
                                                 'prepare_fluxcal_copystatus',
                                                 False)

        # Is the polcal data copied?
        preparepolcalcopystatus = get_param_def(self,
                                                'prepare_polcal_copystatus',
                                                False)

        # Is the target data copied? One entry per beam
        preparetargetbeamscopystatus = get_param_def(
            self, 'prepare_targetbeams_copystatus',
            np.full(self.NBEAMS, False))

        # Reason for flux calibrator dataset not being there
        preparefluxcalrejreason = get_param_def(self,
                                                'prepare_fluxcal_rejreason',
                                                np.full(1, '', dtype='U50'))

        # Reason for polarisation calibrator dataset not being there
        preparepolcalrejreason = get_param_def(self,
                                               'prepare_polcal_rejreason',
                                               np.full(1, '', dtype='U50'))

        # Reason for a beam dataset not being there
        preparetargetbeamsrejreason = get_param_def(
            self, 'prepare_targetbeams_rejreason',
            np.full(self.NBEAMS, '', dtype='U50'))

        ################################################
        # Start the preparation of the flux calibrator #
        ################################################

        if self.fluxcal != '':  # If the flux calibrator is requested
            preparefluxcalrejreason[0] = ''  # Empty the comment string
            preparefluxcalrequested = True
            fluxcal = self.get_fluxcal_path()
            preparefluxcaldiskstatus = os.path.isdir(fluxcal)
            if preparefluxcaldiskstatus:
                logger.debug(
                    'Flux calibrator dataset found on disk ({})'.format(
                        fluxcal))
            else:
                logger.debug(
                    'Flux calibrator dataset not on disk ({})'.format(fluxcal))

            if hasattr(self,
                       'prepare_bypass_alta') and self.prepare_bypass_alta:
                logger.debug("Skipping fetching dataset from ALTA")
            else:
                # Check if the flux calibrator dataset is available on ALTA
                preparefluxcalaltastatus = getstatus_alta(
                    self.prepare_date, self.prepare_obsnum_fluxcal, self.beam)
                if preparefluxcalaltastatus:
                    logger.debug('Flux calibrator dataset available on ALTA')
                else:
                    logger.warning(
                        'Flux calibrator dataset not available on ALTA')
                # Copy the flux calibrator data from ALTA if needed
                if preparefluxcaldiskstatus and preparefluxcalaltastatus:
                    preparefluxcalcopystatus = True
                elif preparefluxcaldiskstatus and not preparefluxcalaltastatus:
                    preparefluxcalcopystatus = True
                    logger.warning(
                        'Flux calibrator data available on disk, but not in ALTA!'
                    )
                elif not preparefluxcaldiskstatus and preparefluxcalaltastatus:
                    subs_managefiles.director(self,
                                              'mk',
                                              self.basedir + self.beam + '/' +
                                              self.rawsubdir,
                                              verbose=False)
                    getdata_alta(int(self.prepare_date),
                                 int(self.prepare_obsnum_fluxcal),
                                 0,
                                 targetdir=self.rawdir + '/' + self.fluxcal)
                    if os.path.isdir(self.get_fluxcal_path()):
                        preparefluxcalcopystatus = True
                        logger.debug(
                            'Flux calibrator dataset successfully copied from ALTA'
                        )
                    else:
                        preparefluxcalcopystatus = False
                        preparefluxcalrejreason[
                            0] = 'Copy from ALTA not successful'
                        logger.error(
                            'Flux calibrator dataset available on ALTA, but NOT successfully copied!'
                        )
                    if self.prepare_flip_ra:
                        flip_ra(self.rawdir + '/' + self.fluxcal,
                                logger=logger)
                elif not preparefluxcaldiskstatus and not preparefluxcalaltastatus:
                    preparefluxcalcopystatus = False
                    preparefluxcalrejreason[0] = 'Dataset not on ALTA or disk'
                    logger.error(
                        'Flux calibrator dataset not available on disk or in ALTA! The next steps will not work!'
                    )
        else:  # In case the flux calibrator is not specified meaning the parameter is empty.
            preparefluxcalrequested = False
            preparefluxcaldiskstatus = False
            preparefluxcalaltastatus = False
            preparefluxcalcopystatus = False
            preparefluxcalrejreason[0] = 'Dataset not specified'
            logger.error(
                'No flux calibrator dataset specified. The next steps will not work!'
            )

        # Save the derived parameters for the fluxcal to the parameter file

        subs_param.add_param(self, 'prepare_fluxcal_requested',
                             preparefluxcalrequested)
        subs_param.add_param(self, 'prepare_fluxcal_diskstatus',
                             preparefluxcaldiskstatus)
        subs_param.add_param(self, 'prepare_fluxcal_altastatus',
                             preparefluxcalaltastatus)
        subs_param.add_param(self, 'prepare_fluxcal_copystatus',
                             preparefluxcalcopystatus)
        subs_param.add_param(self, 'prepare_fluxcal_rejreason',
                             preparefluxcalrejreason)

        ########################################################
        # Start the preparation of the polarisation calibrator #
        ########################################################

        if self.polcal != '':  # If the polarised calibrator is requested
            preparepolcalrejreason[0] = ''  # Empty the comment string
            preparepolcalrequested = True
            preparepolcaldiskstatus = os.path.isdir(self.get_polcal_path())
            if preparepolcaldiskstatus:
                logger.debug('Polarisation calibrator dataset found on disk')
            else:
                logger.debug('Polarisation calibrator dataset not on disk')

            if hasattr(self,
                       'prepare_bypass_alta') and self.prepare_bypass_alta:
                logger.debug("Skipping fetching dataset from ALTA")
            else:

                # Check if the polarisation calibrator dataset is available on ALTA
                preparepolcalaltastatus = getstatus_alta(
                    self.prepare_date, self.prepare_obsnum_polcal, self.beam)
                if preparepolcalaltastatus:
                    logger.debug(
                        'Polarisation calibrator dataset available on ALTA')
                else:
                    logger.warning(
                        'Polarisation calibrator dataset not available on ALTA'
                    )
                # Copy the polarisation calibrator data from ALTA if needed
                if preparepolcaldiskstatus and preparepolcalaltastatus:
                    preparepolcalcopystatus = True
                elif preparepolcaldiskstatus and not preparepolcalaltastatus:
                    preparepolcalcopystatus = True
                    logger.warning(
                        'Polarisation calibrator data available on disk, but not in ALTA!'
                    )
                elif not preparepolcaldiskstatus and preparepolcalaltastatus:
                    subs_managefiles.director(self,
                                              'mk',
                                              self.basedir + self.beam + '/' +
                                              self.rawsubdir,
                                              verbose=False)
                    getdata_alta(int(self.prepare_date),
                                 int(self.prepare_obsnum_polcal),
                                 0,
                                 targetdir=self.rawdir + '/' + self.polcal)
                    if os.path.isdir(self.get_polcal_path()):
                        preparepolcalcopystatus = True
                        logger.debug(
                            'Polarisation calibrator dataset successfully copied from ALTA'
                        )
                    else:
                        preparepolcalcopystatus = False
                        preparepolcalrejreason[
                            0] = 'Copy from ALTA not successful'
                        logger.error(
                            'Polarisation calibrator dataset available on ALTA, but NOT successfully copied!'
                        )
                    if self.prepare_flip_ra:
                        flip_ra(self.rawdir + '/' + self.polcal, logger=logger)
                elif not preparepolcaldiskstatus and not preparepolcalaltastatus:
                    preparepolcalcopystatus = False
                    preparepolcalrejreason[0] = 'Dataset not on ALTA or disk'
                    logger.warning(
                        'Polarisation calibrator dataset not available on disk nor in ALTA! Polarisation calibration will not work!'
                    )
        else:  # In case the polarisation calibrator is not specified, i.e. the parameter is empty.
            preparepolcalrequested = False
            preparepolcaldiskstatus = False
            preparepolcalaltastatus = False
            preparepolcalcopystatus = False
            preparepolcalrejreason[0] = 'Dataset not specified'
            logger.warning(
                'No polarisation calibrator dataset specified. Polarisation calibration will not work!'
            )

        # Save the derived parameters for the polcal to the parameter file

        subs_param.add_param(self, 'prepare_polcal_requested',
                             preparepolcalrequested)
        subs_param.add_param(self, 'prepare_polcal_diskstatus',
                             preparepolcaldiskstatus)
        subs_param.add_param(self, 'prepare_polcal_altastatus',
                             preparepolcalaltastatus)
        subs_param.add_param(self, 'prepare_polcal_copystatus',
                             preparepolcalcopystatus)
        subs_param.add_param(self, 'prepare_polcal_rejreason',
                             preparepolcalrejreason)

        ################################################
        # Start the preparation of the target datasets #
        ################################################

        if self.prepare_obsnum_target and self.prepare_obsnum_target != '':
            if self.prepare_target_beams == 'all':  # if all beams are requested
                reqbeams_int = range(
                    self.NBEAMS)  # create a list of numbers for the beams
                reqbeams = [str(b).zfill(2)
                            for b in reqbeams_int]  # Add the leading zeros
            else:  # if only certain beams are requested
                reqbeams = self.prepare_target_beams.split(",")
                reqbeams_int = [int(b) for b in reqbeams]
                reqbeams = [str(b).zfill(2)
                            for b in reqbeams_int]  # Add leading zeros
            for beam in reqbeams:
                preparetargetbeamsrequested[int(beam)] = True
            for b in reqbeams_int:
                # Check which target beams are already on disk
                preparetargetbeamsrejreason[int(
                    b)] = ''  # Empty the comment string
                preparetargetbeamsdiskstatus[b] = os.path.isdir(
                    self.basedir + str(b).zfill(2) + '/' + self.rawsubdir +
                    '/' + self.target)
                if preparetargetbeamsdiskstatus[b]:
                    logger.debug('Target dataset for beam ' + str(b).zfill(2) +
                                 ' found on disk')
                else:
                    logger.debug('Target dataset for beam ' + str(b).zfill(2) +
                                 ' NOT found on disk')

                if hasattr(self,
                           'prepare_bypass_alta') and self.prepare_bypass_alta:
                    logger.debug("Skipping fetching dataset from ALTA")
                else:
                    # Check which target datasets are available on ALTA
                    preparetargetbeamsaltastatus[b] = getstatus_alta(
                        self.prepare_date, self.prepare_obsnum_target,
                        str(b).zfill(2))
                    if preparetargetbeamsaltastatus[b]:
                        logger.debug('Target dataset for beam ' +
                                     str(b).zfill(2) + ' available on ALTA')
                    else:
                        logger.debug('Target dataset for beam ' +
                                     str(b).zfill(2) +
                                     ' NOT available on ALTA')

            if hasattr(self,
                       'prepare_bypass_alta') and self.prepare_bypass_alta:
                logger.debug("Skipping fetching dataset from ALTA")
            else:
                # Set the copystatus of the beams and copy beams which are requested but not on disk
                for c in reqbeams_int:
                    if preparetargetbeamsdiskstatus[
                            c] and preparetargetbeamsaltastatus[c]:
                        preparetargetbeamscopystatus[c] = True
                    elif preparetargetbeamsdiskstatus[
                            c] and not preparetargetbeamsaltastatus[c]:
                        preparetargetbeamscopystatus[c] = True
                        logger.warning('Target dataset for beam ' +
                                       str(c).zfill(2) +
                                       ' available on disk, but not in ALTA!')
                    elif not preparetargetbeamsdiskstatus[c] and preparetargetbeamsaltastatus[c] \
                            and str(c).zfill(2) in reqbeams:  # if target dataset is requested, but not on disk
                        subs_managefiles.director(self,
                                                  'mk',
                                                  self.basedir +
                                                  str(c).zfill(2) + '/' +
                                                  self.rawsubdir,
                                                  verbose=False)
                        getdata_alta(int(self.prepare_date),
                                     int(self.prepare_obsnum_target),
                                     int(str(c).zfill(2)),
                                     targetdir=self.basedir + str(c).zfill(2) +
                                     '/' + self.rawsubdir + '/' + self.target)
                        # Check if copy was successful
                        if os.path.isdir(self.basedir + str(c).zfill(2) + '/' +
                                         self.rawsubdir + '/' + self.target):
                            preparetargetbeamscopystatus[c] = True
                        else:
                            preparetargetbeamscopystatus[c] = False
                            preparetargetbeamsrejreason[int(
                                c)] = 'Copy from ALTA not successful'
                            logger.error(
                                'Target beam dataset available on ALTA, but NOT successfully copied!'
                            )
                        if self.prepare_flip_ra:
                            flip_ra(self.basedir + str(c).zfill(2) + '/' +
                                    self.rawsubdir + '/' + self.target,
                                    logger=logger)
                    elif not preparetargetbeamsdiskstatus[c] and not preparetargetbeamsaltastatus[c] \
                            and str(c).zfill(2) in reqbeams:
                        preparetargetbeamscopystatus[c] = False
                        preparetargetbeamsrejreason[int(
                            c)] = 'Dataset not on ALTA or disk'
                        logger.error(
                            'Target beam dataset not available on disk nor in ALTA! Requested beam cannot be processed!'
                        )
        else:  # If no target dataset is requested, i.e. the parameter is empty
            logger.warning('No target datasets specified!')
            for b in range(self.NBEAMS):
                preparetargetbeamsrequested[b] = False
                preparetargetbeamsdiskstatus[b] = False
                preparetargetbeamsaltastatus[b] = False
                preparetargetbeamscopystatus[b] = False
                preparetargetbeamsrejreason[int(b)] = 'Dataset not specified'

        # Save the derived parameters for the target beams to the parameter file

        subs_param.add_param(self, 'prepare_targetbeams_requested',
                             preparetargetbeamsrequested)
        subs_param.add_param(self, 'prepare_targetbeams_diskstatus',
                             preparetargetbeamsdiskstatus)
        subs_param.add_param(self, 'prepare_targetbeams_altastatus',
                             preparetargetbeamsaltastatus)
        subs_param.add_param(self, 'prepare_targetbeams_copystatus',
                             preparetargetbeamscopystatus)
        subs_param.add_param(self, 'prepare_targetbeams_rejreason',
                             preparetargetbeamsrejreason)
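
The four disk/ALTA branches above for the flux calibrator, polarisation calibrator and target beams all follow the same truth table. The helper below is a hypothetical sketch that merely restates that logic for a single dataset (it is not part of apercal); on_disk and on_alta stand for the *_diskstatus and *_altastatus flags.

def decide_copy(on_disk, on_alta):
    """Sketch of the copy decision for one dataset: (needs_fetch, usable, reject_reason)."""
    if on_disk and on_alta:
        return False, True, ''
    if on_disk and not on_alta:
        # Usable, but the pipeline warns that the data is not archived in ALTA.
        return False, True, ''
    if not on_disk and on_alta:
        # Fetch with getdata_alta first; the real code re-checks os.path.isdir afterwards.
        return True, True, ''
    return False, False, 'Dataset not on ALTA or disk'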
Beispiel #25
0
 def selfcal_standard(self):
     """
     Executes the standard method of self-calibration with the given parameters
     """
     subs_setinit.setinitdirs(self)
     subs_setinit.setdatasetnamestomiriad(self)
     logger.info(' Starting standard self calibration routine')
     subs_managefiles.director(self, 'ch', self.selfcaldir)
     for chunk in self.list_chunks():
         logger.info('Starting standard self-calibration routine on frequency chunk ' + chunk + ' #')
         subs_managefiles.director(self, 'ch', self.selfcaldir + '/' + chunk)
         if os.path.isfile(self.selfcaldir + '/' + chunk + '/' + chunk + '.mir/visdata'):
             theoretical_noise = calc_theoretical_noise(self.selfcaldir + '/' + chunk + '/' + chunk + '.mir')
             logger.info('Theoretical noise for chunk ' + chunk + ' is ' + str(theoretical_noise) + ' Jy/beam #')
             theoretical_noise_threshold = self.calc_theoretical_noise_threshold(theoretical_noise)
             logger.info('Your theoretical noise threshold will be ' + str(
                 self.selfcal_standard_nsigma) + ' times the theoretical noise corresponding to ' + str(
                 theoretical_noise_threshold) + ' Jy/beam #')
             dr_list = calc_dr_maj(self.selfcal_standard_drinit, self.selfcal_standard_dr0,
                                        self.selfcal_standard_majorcycle, self.selfcal_standard_majorcycle_function)
             logger.info(
                 'Your dynamic range limits are set to ' + str(dr_list) + ' for the major self-calibration cycles #')
             for majc in range(self.selfcal_standard_majorcycle):
                 logger.info(
                     'Major self-calibration cycle ' + str(majc) + ' for frequency chunk ' + chunk + ' started #')
                 subs_managefiles.director(self, 'mk', self.selfcaldir + '/' + str(chunk) + '/' + str(majc).zfill(2))
                 # Calculate the dynamic ranges during minor cycles
                 dr_minlist = self.calc_dr_min(dr_list, majc, self.selfcal_standard_minorcycle,
                                               self.selfcal_standard_minorcycle_function)
                 logger.info('The minor cycle dynamic range limits for major cycle ' + str(majc) + ' are ' + str(
                     dr_minlist) + ' #')
                 for minc in range(self.selfcal_standard_minorcycle):
                     try:
                         self.run_continuum_minoriteration(chunk, majc, minc, dr_minlist[minc],
                                                           theoretical_noise_threshold)
                     except Exception:
                         logger.warning('Chunk ' + chunk + ' does not seem to contain data to image #')
                         break
                 try:
                     logger.info('Doing self-calibration with uvmin=' + str(
                         self.selfcal_standard_uvmin[majc]) + ', uvmax=' + str(
                         self.selfcal_standard_uvmax[majc]) + ', solution interval=' + str(
                         self.selfcal_standard_solint[majc]) + ' minutes for major cycle ' + str(majc).zfill(
                         2) + ' #')
                     selfcal = lib.miriad('selfcal')
                     selfcal.vis = chunk + '.mir'
                     selfcal.select = '"' + 'uvrange(' + str(self.selfcal_standard_uvmin[majc]) + ',' + str(
                         self.selfcal_standard_uvmax[majc]) + ')"'
                     selfcal.model = str(majc).zfill(2) + '/model_' + str(minc).zfill(2)
                     selfcal.interval = self.selfcal_standard_solint[majc]
                     # Choose reference antenna if given
                     if self.selfcal_refant == '':
                         pass
                     else:
                         selfcal.refant = self.selfcal_refant
                     # Enable amplitude calibration if triggered. Check the 'auto' mode first,
                     # otherwise the string 'auto' would already satisfy the plain truthiness test
                     # and the automatic flux check would never run.
                     if self.selfcal_standard_amp == 'auto':
                         modelflux = self.calc_isum(str(majc).zfill(2) + '/model_' + str(minc).zfill(2))
                         if modelflux >= self.selfcal_standard_amp_auto_limit:
                             logger.info(
                                 'Flux of clean model is ' + str(modelflux) + ' Jy. Doing amplitude calibration. #')
                             selfcal.options = 'mfs,amp'
                         else:
                             selfcal.options = 'mfs,phase'
                     elif self.selfcal_standard_amp:  # explicit amplitude self-calibration requested
                         selfcal.options = 'mfs,amp'
                     else:  # phase-only self-calibration
                         selfcal.options = 'mfs,phase'
                     if self.selfcal_standard_nfbin >= 1:
                         selfcal.nfbin = self.selfcal_standard_nfbin
                     selfcal.go()
                     logger.info('Major self-calibration cycle ' + str(
                         majc) + ' for frequency chunk ' + chunk + ' finished #')
                 except Exception:
                     logger.warning(
                         'Model for self-calibration not found. No further calibration on this chunk possible!')
                     break
             logger.info('Standard self-calibration routine for chunk ' + chunk + ' finished #')
         else:
             logger.warning('No data in chunk ' + chunk + '. Maybe all data is flagged? #')
     logger.info(' Standard self calibration routine finished')
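
calc_dr_maj and calc_dr_min are referenced above but not shown in this example. As an illustration only, a plausible power-law schedule for the major-cycle dynamic-range limits could look like the sketch below; this is an assumption, not necessarily the actual apercal implementation.

import numpy as np

def calc_dr_maj_sketch(drinit, dr0, majorcycles, function='power'):
    """Return one dynamic-range limit per major cycle, growing by a factor dr0 each cycle."""
    if function == 'power':
        return [drinit * np.power(dr0, m) for m in range(majorcycles)]
    raise ValueError('Unknown major cycle function: ' + str(function))

# Example: calc_dr_maj_sketch(30, 2.0, 5) -> [30.0, 60.0, 120.0, 240.0, 480.0]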
Beispiel #26
0
 def run_continuum_minoriteration(self, chunk, majc, minc, drmin, theoretical_noise_threshold):
     """
     Does a self-calibration minor iteration for the standard mode.
     chunk: The frequency chunk to image and calibrate
     majc: Current major iteration
     minc: Current minor iteration
     drmin: Maximum dynamic range for this minor iteration
     theoretical_noise_threshold: Calculated theoretical noise threshold
     """
     subs_setinit.setinitdirs(self)
     subs_setinit.setdatasetnamestomiriad(self)
     logger.info('Minor self-calibration cycle ' + str(minc) + ' for frequency chunk ' + chunk + ' started #')
     if minc == 0:
         invert = lib.miriad('invert')  # Create the dirty image
         invert.vis = chunk + '.mir'
         invert.map = str(majc).zfill(2) + '/map_' + str(minc).zfill(2)
         invert.beam = str(majc).zfill(2) + '/beam_' + str(minc).zfill(2)
         invert.imsize = self.selfcal_image_imsize
         invert.cell = self.selfcal_image_cellsize
         invert.stokes = 'ii'
         invert.options = 'mfs,double'
         invert.slop = 1
         invert.robust = -2
         invert.go()
         imax = self.calc_imax(str(majc).zfill(2) + '/map_' + str(minc).zfill(2))
         noise_threshold = self.calc_noise_threshold(imax, minc, majc)
         dynamic_range_threshold = calc_dynamic_range_threshold(imax, drmin,
                                                                     self.selfcal_standard_minorcycle0_dr)
         mask_threshold, mask_threshold_type = calc_mask_threshold(theoretical_noise_threshold, noise_threshold,
                                                                        dynamic_range_threshold)
         logger.info('Mask threshold for major/minor cycle ' + str(majc) + '/' + str(minc) + ' set to ' + str(
             mask_threshold) + ' Jy/beam #')
         logger.info('Mask threshold set by ' + str(mask_threshold_type) + ' #')
         if majc == 0:
             maths = lib.miriad('maths')
             maths.out = str(majc).zfill(2) + '/mask_' + str(minc).zfill(2)
             maths.exp = '"<' + str(majc).zfill(2) + '/map_' + str(minc).zfill(2) + '>"'
             maths.mask = '"<' + str(majc).zfill(2) + '/map_' + str(minc).zfill(2) + '>.gt.' + str(
                 mask_threshold) + '"'
             maths.go()
             logger.info('Mask with threshold ' + str(mask_threshold) + ' Jy/beam created #')
         else:
             subs_managefiles.director(self, 'cp', str(majc).zfill(2) + '/mask_' + str(minc).zfill(2),
                                       file_=str(majc - 1).zfill(2) + '/mask_' + str(
                                           self.selfcal_standard_minorcycle - 1).zfill(2))
             logger.info('Mask from last minor iteration of last major cycle copied #')
         clean_cutoff = self.calc_clean_cutoff(mask_threshold)
         logger.info('Clean threshold at major/minor cycle ' + str(majc) + '/' + str(minc) + ' was set to ' + str(
             clean_cutoff) + ' Jy/beam #')
         clean = lib.miriad('clean')  # Clean the image down to the calculated threshold
         clean.map = str(majc).zfill(2) + '/map_' + str(0).zfill(2)
         clean.beam = str(majc).zfill(2) + '/beam_' + str(0).zfill(2)
         clean.out = str(majc).zfill(2) + '/model_' + str(minc).zfill(2)
         clean.cutoff = clean_cutoff
         clean.niters = 1000000
         clean.region = '"' + 'mask(' + str(majc).zfill(2) + '/mask_' + str(minc).zfill(2) + ')' + '"'
         clean.go()
         logger.info('Major/minor cycle ' + str(majc) + '/' + str(minc) + ' cleaning done #')
         restor = lib.miriad('restor')
         restor.model = str(majc).zfill(2) + '/model_' + str(minc).zfill(2)
         restor.beam = str(majc).zfill(2) + '/beam_' + str(0).zfill(2)
         restor.map = str(majc).zfill(2) + '/map_' + str(0).zfill(2)
         restor.out = str(majc).zfill(2) + '/image_' + str(minc).zfill(2)
         restor.mode = 'clean'
         restor.go()  # Create the cleaned image
         logger.info('Cleaned image for major/minor cycle ' + str(majc) + '/' + str(minc) + ' created #')
         restor.mode = 'residual'
         restor.out = str(majc).zfill(2) + '/residual_' + str(minc).zfill(2)
         restor.go()
         logger.info('Residual image for major/minor cycle ' + str(majc) + '/' + str(minc) + ' created #')
         logger.info('Peak of the residual image is ' + str(
             self.calc_imax(str(majc).zfill(2) + '/residual_' + str(minc).zfill(2))) + ' Jy/beam #')
         logger.info('RMS of the residual image is ' + str(
             self.calc_irms(str(majc).zfill(2) + '/residual_' + str(minc).zfill(2))) + ' Jy/beam #')
     else:
         imax = self.calc_imax(str(majc).zfill(2) + '/map_' + str(0).zfill(2))
         noise_threshold = self.calc_noise_threshold(imax, minc, majc)
         dynamic_range_threshold = calc_dynamic_range_threshold(imax, drmin,
                                                                     self.selfcal_standard_minorcycle0_dr)
         mask_threshold, mask_threshold_type = calc_mask_threshold(theoretical_noise_threshold, noise_threshold,
                                                                        dynamic_range_threshold)
         logger.info('Mask threshold for major/minor cycle ' + str(majc) + '/' + str(minc) + ' set to ' + str(
             mask_threshold) + ' Jy/beam #')
         logger.info('Mask threshold set by ' + str(mask_threshold_type) + ' #')
         maths = lib.miriad('maths')
         maths.out = str(majc).zfill(2) + '/mask_' + str(minc).zfill(2)
         maths.exp = '"<' + str(majc).zfill(2) + '/image_' + str(minc - 1).zfill(2) + '>"'
         maths.mask = '"<' + str(majc).zfill(2) + '/image_' + str(minc - 1).zfill(2) + '>.gt.' + str(
             mask_threshold) + '"'
         maths.go()
         logger.info('Mask with threshold ' + str(mask_threshold) + ' Jy/beam created #')
         clean_cutoff = self.calc_clean_cutoff(mask_threshold)
         logger.info('Clean threshold at major/minor cycle ' + str(majc) + '/' + str(minc) + ' was set to ' + str(
             clean_cutoff) + ' Jy/beam #')
         clean = lib.miriad('clean')  # Clean the image down to the calculated threshold
         clean.map = str(majc).zfill(2) + '/map_' + str(0).zfill(2)
         clean.beam = str(majc).zfill(2) + '/beam_' + str(0).zfill(2)
         clean.model = str(majc).zfill(2) + '/model_' + str(minc - 1).zfill(2)
         clean.out = str(majc).zfill(2) + '/model_' + str(minc).zfill(2)
         clean.cutoff = clean_cutoff
         clean.niters = 1000000
         clean.region = '"' + 'mask(' + str(majc).zfill(2) + '/mask_' + str(minc).zfill(2) + ')' + '"'
         clean.go()
         logger.info('Major/minor cycle ' + str(majc) + '/' + str(minc) + ' cleaning done #')
         restor = lib.miriad('restor')
         restor.model = str(majc).zfill(2) + '/model_' + str(minc).zfill(2)
         restor.beam = str(majc).zfill(2) + '/beam_' + str(0).zfill(2)
         restor.map = str(majc).zfill(2) + '/map_' + str(0).zfill(2)
         restor.out = str(majc).zfill(2) + '/image_' + str(minc).zfill(2)
         restor.mode = 'clean'
         restor.go()  # Create the cleaned image
         logger.info('Cleaned image for major/minor cycle ' + str(majc) + '/' + str(minc) + ' created #')
         restor.mode = 'residual'
         restor.out = str(majc).zfill(2) + '/residual_' + str(minc).zfill(2)
         restor.go()
         logger.info('Residual image for major/minor cycle ' + str(majc) + '/' + str(minc) + ' created #')
         logger.info('Peak of the residual image is ' + str(
             self.calc_imax(str(majc).zfill(2) + '/residual_' + str(minc).zfill(2))) + ' Jy/beam #')
         logger.info('RMS of the residual image is ' + str(
             self.calc_irms(str(majc).zfill(2) + '/residual_' + str(minc).zfill(2))) + ' Jy/beam #')
     logger.info('Minor self-calibration cycle ' + str(minc) + ' for frequency chunk ' + chunk + ' finished #')
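
calc_mask_threshold is referenced above but not shown. Assuming it simply picks the largest (most conservative) of the three limits computed in the minor iteration and reports which one was used, a minimal sketch could be the following; this is an illustration of that assumption, not the verbatim apercal helper.

def calc_mask_threshold_sketch(theoretical_noise_threshold, noise_threshold, dynamic_range_threshold):
    """Return the largest of the three limits and a label saying which one set the mask threshold."""
    thresholds = {'theoretical noise threshold': theoretical_noise_threshold,
                  'noise threshold': noise_threshold,
                  'dynamic range threshold': dynamic_range_threshold}
    mask_threshold_type = max(thresholds, key=thresholds.get)
    return thresholds[mask_threshold_type], mask_threshold_type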
Beispiel #27
0
    def convert_selfcaluv2uvfits(self):
        """
        Looks for the last self-calibrated uv dataset, copies its gains over to the original file, applies them, and converts to UVFITS format.
        """
        subs_setinit.setinitdirs(self)

        sbeam = 'selfcal_B' + str(self.beam).zfill(2)
        tbeam = 'transfer_B' + str(self.beam).zfill(2)

        # Create the parameters for the parameter file for the conversion of the UVFITS-files

        transfertargetbeamsselfcaluv2uvfitsstatus = get_param_def(
            self, tbeam + '_targetbeams_selfcaluv2uvfits_status', False)

        if self.transfer_convert_selfcaluv2uvfits:
            subs_setinit.setinitdirs(self)
            subs_setinit.setdatasetnamestomiriad(self)
            subs_managefiles.director(self,
                                      'ch',
                                      self.transferdir,
                                      verbose=True)
            if not transfertargetbeamsselfcaluv2uvfitsstatus:
                # Get the status of the selfcal for the specified beam
                selfcaltargetbeamsphasestatus = get_param_def(
                    self, sbeam + '_targetbeams_phase_status', False)
                selfcaltargetbeamsampstatus = get_param_def(
                    self, sbeam + '_targetbeams_amp_status', False)
                datasetname_amp = os.path.join(
                    self.selfcaldir, self.target).rstrip('.mir') + '_amp.mir'
                datasetname_phase = os.path.join(self.selfcaldir, self.target)
                logger.debug("Setting amplitude selfcal file name: {}".format(
                    datasetname_amp))
                logger.debug("Setting phase selfcal file name: {}".format(
                    datasetname_phase))
                # datasetname_amp = self.get_target_path().rstrip('.mir') + '_amp.mir'
                # datasetname_phase = self.get_target_path()
                if os.path.isdir(
                        datasetname_amp) and selfcaltargetbeamsampstatus:
                    logger.info('Beam ' + self.beam +
                                ': Using amplitude self-calibrated dataset!')
                    dataset = datasetname_amp
                elif os.path.isdir(
                        datasetname_phase) and selfcaltargetbeamsphasestatus:
                    logger.info(
                        'Beam ' + self.beam +
                        ': Using phase self-calibrated dataset. Amplitude calibration was not successful or not wanted!'
                    )
                    dataset = datasetname_phase
                else:
                    dataset = None

                if dataset is not None:
                    # Copy the raw dataset to the transfer directory
                    subs_managefiles.director(
                        self,
                        'cp',
                        self.transferdir + '/' + self.target,
                        file_=self.crosscaldir + '/' + self.target)
                    if selfcaltargetbeamsampstatus:
                        gpcopy = lib.miriad('gpcopy')
                        gpcopy.vis = datasetname_phase
                        gpcopy.out = self.transferdir + '/' + self.target
                        gpcopy.go()
                        uvaver = lib.miriad('uvaver')
                        uvaver.vis = self.transferdir + '/' + self.target
                        uvaver.out = self.transferdir + '/' + \
                            self.target.rstrip('.mir') + '_phase.mir'
                        uvaver.go()
                        gpcopy = lib.miriad('gpcopy')
                        gpcopy.vis = datasetname_amp
                        gpcopy.out = self.transferdir + '/' + \
                            self.target.rstrip('.mir') + '_phase.mir'
                        gpcopy.go()
                        fits = lib.miriad('fits')
                        fits.op = 'uvout'
                        fits.in_ = self.transferdir + '/' + \
                            self.target.rstrip('.mir') + '_phase.mir'
                        fits.out = self.transferdir + '/' + \
                            self.target.rstrip('.mir') + '.UVFITS'
                        fits.go()
                        if os.path.isfile(self.transferdir + '/' +
                                          self.target.rstrip('.mir') +
                                          '.UVFITS'):
                            subs_managefiles.director(
                                self, 'rm',
                                self.transferdir + '/' + self.target)
                            subs_managefiles.director(
                                self, 'rm', self.transferdir + '/' +
                                self.target.rstrip('.mir') + '_phase.mir')
                            transfertargetbeamsselfcaluv2uvfitsstatus = True
                        else:
                            logger.error(
                                'Beam ' + self.beam +
                                ': Conversion was not successful. No UVFITS-file generated!'
                            )
                            transfertargetbeamsselfcaluv2uvfitsstatus = False
                    elif selfcaltargetbeamsphasestatus:
                        gpcopy = lib.miriad('gpcopy')
                        gpcopy.vis = datasetname_phase
                        gpcopy.out = self.transferdir + '/' + self.target
                        gpcopy.go()
                        fits = lib.miriad('fits')
                        fits.op = 'uvout'
                        fits.in_ = self.transferdir + '/' + self.target
                        fits.out = self.transferdir + '/' + \
                            self.target.rstrip('.mir') + '.UVFITS'
                        fits.go()
                        if os.path.isfile(self.transferdir + '/' +
                                          self.target.rstrip('.mir') +
                                          '.UVFITS'):
                            subs_managefiles.director(
                                self, 'rm',
                                self.transferdir + '/' + self.target)
                            transfertargetbeamsselfcaluv2uvfitsstatus = True
                        else:
                            logger.error(
                                'Beam ' + self.beam +
                                ': Conversion was not successful. No UVFITS-file generated!'
                            )
                            transfertargetbeamsselfcaluv2uvfitsstatus = False
                else:
                    logger.error(
                        'Beam ' + self.beam +
                        ': Self-calibration was not successful. No conversion to UVFITS-format possible!'
                    )
                    transfertargetbeamsselfcaluv2uvfitsstatus = False
            else:
                logger.info(
                    'Beam ' + self.beam +
                    ': Conversion of final calibrated data to UVFITS-format already successfully executed!'
                )
        else:
            logger.info(
                'Beam ' + self.beam +
                ': Conversion of final calibrated data to UVFITS-format not selected!'
            )

        # Save the derived parameters to the parameter file

        subs_param.add_param(self,
                             tbeam + '_targetbeams_selfcaluv2uvfits_status',
                             transfertargetbeamsselfcaluv2uvfitsstatus)
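
A note on the self.target.rstrip('.mir') calls used throughout this example: str.rstrip strips a set of characters, not a suffix, so a target name whose stem ends in 'm', 'i' or 'r' would lose those characters as well. A small suffix-safe helper (a sketch, not part of apercal) would be:

def strip_mir_suffix(name):
    """Remove a trailing '.mir' extension without touching the rest of the name."""
    return name[:-len('.mir')] if name.endswith('.mir') else name

# 'NGC1234m.mir'.rstrip('.mir') == 'NGC1234'    (trailing 'm' of the stem is lost)
# strip_mir_suffix('NGC1234m.mir') == 'NGC1234m'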
Beispiel #28
0
    def ms2miriad(self):
        """
        Converts the data from MS to MIRIAD format via UVFITS using drivecasa. Does it for the flux calibrator,
        polarisation calibrator, and target field independently.
        """
        subs_setinit.setinitdirs(self)
        nbeams = 37

        # Create the parameters for the parameter file for converting from MS to UVFITS format

        # Flux calibrator MS dataset available?
        convertfluxcalmsavailable = get_param_def(
            self, 'convert_fluxcal_MSavailable', False)

        # Polarised calibrator MS dataset available?
        convertpolcalmsavailable = get_param_def(self,
                                                 'convert_polcal_MSavailable',
                                                 False)

        # Target beam MS dataset available?
        converttargetbeamsmsavailable = get_param_def(
            self, 'convert_targetbeams_MSavailable', np.full(nbeams, False))

        # Flux calibrator MS dataset converted to UVFITS?
        convertfluxcalms2uvfits = get_param_def(self,
                                                'convert_fluxcal_MS2UVFITS',
                                                False)

        # Polarised calibrator MS dataset converted to UVFITS?
        convertpolcalms2uvfits = get_param_def(self,
                                               'convert_polcal_MS2UVFITS',
                                               False)

        # Target beam MS dataset converted to UVFITS?
        converttargetbeamsms2uvfits = get_param_def(
            self, 'convert_targetbeams_MS2UVFITS', np.full(nbeams, False))

        # Flux calibrator UVFITS dataset available?
        convertfluxcaluvfitsavailable = get_param_def(
            self, 'convert_fluxcal_UVFITSavailable', False)

        # Polarised calibrator UVFITS dataset available?
        convertpolcaluvfitsavailable = get_param_def(
            self, 'convert_polcal_UVFITSavailable', False)

        # Target beam UVFITS dataset available?
        converttargetbeamsuvfitsavailable = get_param_def(
            self, 'convert_targetbeams_UVFITSavailable',
            np.full(nbeams, False))

        # Flux calibrator UVFITS dataset converted to MIRIAD?
        convertfluxcaluvfits2miriad = get_param_def(
            self, 'convert_fluxcal_UVFITS2MIRIAD', False)

        # Polarised calibrator UVFITS dataset converted to MIRIAD?
        convertpolcaluvfits2miriad = get_param_def(
            self, 'convert_polcal_UVFITS2MIRIAD', False)
        # Target beam UVFITS dataset converted to MIRIAD?

        converttargetbeamsuvfits2miriad = get_param_def(
            self, 'convert_targetbeams_UVFITS2MIRIAD', np.full(nbeams, False))

        # Check which datasets are available in MS format #
        if self.fluxcal != '':
            convertfluxcalmsavailable = path.isdir(self.get_fluxcal_path())
        else:
            logger.warning(
                'Flux calibrator dataset not specified. Cannot convert flux calibrator!'
            )
        if self.polcal != '':
            convertpolcalmsavailable = path.isdir(self.get_polcal_path())
        else:
            logger.warning(
                'Polarised calibrator dataset not specified. Cannot convert polarised calibrator!'
            )
        if self.target != '':
            for b in range(nbeams):
                converttargetbeamsmsavailable[b] = path.isdir(
                    self.get_target_path(str(b).zfill(2)))
        else:
            logger.warning(
                'Target beam dataset not specified. Cannot convert target beams!'
            )

        # Save the derived parameters for the availability to the parameter file

        subs_param.add_param(self, 'convert_fluxcal_MSavailable',
                             convertfluxcalmsavailable)
        subs_param.add_param(self, 'convert_polcal_MSavailable',
                             convertpolcalmsavailable)
        subs_param.add_param(self, 'convert_targetbeams_MSavailable',
                             converttargetbeamsmsavailable)

        # Convert the flux calibrator
        if self.convert_fluxcal:
            if self.fluxcal != '':
                if not convertfluxcaluvfits2miriad:
                    if convertfluxcalmsavailable:
                        logger.debug(
                            'Converting flux calibrator dataset from MS to UVFITS format.'
                        )
                        subs_managefiles.director(
                            self,
                            'mk',
                            self.get_crosscalsubdir_path(),
                            verbose=False)
                        fluxcal_ms = self.get_fluxcal_path()
                        if subs_msutils.has_correcteddata(fluxcal_ms):
                            datacolumn = "corrected"
                        else:
                            datacolumn = "data"
                            logger.warning(
                                'Flux calibrator does not have a corrected_data column! Using uncorrected '
                                'data for conversion!')

                        fluxcal_fits = mspath_to_fitspath(
                            self.get_crosscalsubdir_path(), fluxcal_ms)

                        fc_convert = exportuvfits_cmd.format(
                            vis=self.get_fluxcal_path(),
                            fits=fluxcal_fits,
                            datacolumn=datacolumn)

                        lib.run_casa([fc_convert], timeout=3600)
                        if path.isfile(fluxcal_fits):
                            convertfluxcalms2uvfits = True
                            logger.info(
                                'Converted flux calibrator dataset from MS to UVFITS format!'
                            )
                        else:
                            convertfluxcalms2uvfits = False
                            logger.warning(
                                'Could not convert flux calibrator dataset from MS to UVFITS format!'
                            )
                    else:
                        logger.warning(
                            'Flux calibrator dataset not available!')
                else:
                    logger.info(
                        'Flux calibrator dataset was already converted from MS to UVFITS format'
                    )
            else:
                logger.warning(
                    'Flux calibrator dataset not specified. Cannot convert flux calibrator!'
                )
        else:
            logger.warning('Not converting flux calibrator dataset!')

        # Convert the polarised calibrator
        if self.convert_polcal:
            if self.polcal != '':
                if not convertpolcaluvfits2miriad:
                    if convertpolcalmsavailable:
                        logger.debug(
                            'Converting polarised calibrator dataset from MS to UVFITS format.'
                        )
                        subs_managefiles.director(
                            self,
                            'mk',
                            self.get_crosscalsubdir_path(),
                            verbose=False)
                        polcal_ms = self.get_polcal_path()
                        if subs_msutils.has_correcteddata(polcal_ms):
                            datacolumn = "corrected"
                        else:
                            datacolumn = "data"
                            logger.warning(
                                'Polarised calibrator does not have a corrected_data column! Using '
                                'uncorrected data for conversion!')

                        polcal_fits = mspath_to_fitspath(
                            self.get_crosscalsubdir_path(), polcal_ms)

                        pc_convert = exportuvfits_cmd.format(
                            vis=polcal_ms,
                            fits=polcal_fits,
                            datacolumn=datacolumn)

                        lib.run_casa([pc_convert], timeout=3600)
                        if path.isfile(polcal_fits):
                            convertpolcalms2uvfits = True
                            logger.info(
                                'Converted polarised calibrator dataset from MS to UVFITS format!'
                            )
                        else:
                            convertpolcalms2uvfits = False
                            logger.warning(
                                'Could not convert polarised calibrator dataset from MS to UVFITS format!'
                            )
                    else:
                        logger.warning(
                            'Polarised calibrator dataset not available!')
                else:
                    logger.info(
                        'Polarised calibrator dataset was already converted from MS to UVFITS format'
                    )
            else:
                logger.warning(
                    'Polarised calibrator dataset not specified. Cannot convert polarised calibrator!'
                )
        else:
            logger.warning('Not converting polarised calibrator dataset!')

        # Convert the target beams
        if self.convert_target:
            if self.target != '':
                logger.info(
                    'Converting target beam datasets from MS to UVFITS format.'
                )
                if self.convert_targetbeams == 'all':
                    datasets = self.get_datasets()
                    logger.debug(
                        'Converting all available target beam datasets')
                else:
                    beams = self.convert_targetbeams.split(",")
                    datasets = self.get_datasets(beams)
                    logger.debug(
                        'Converting all selected target beam datasets')
                for vis, beam in datasets:
                    if not converttargetbeamsuvfits2miriad[int(beam)]:
                        if converttargetbeamsmsavailable[int(beam)]:
                            subs_managefiles.director(
                                self,
                                'mk',
                                self.get_crosscalsubdir_path(beam),
                                verbose=False)

                            target_ms = self.get_target_path(beam)
                            target_fits = mspath_to_fitspath(
                                self.get_crosscalsubdir_path(beam), target_ms)

                            if subs_msutils.has_correcteddata(target_ms):
                                datacolumn = "corrected"
                            else:
                                datacolumn = "data"
                                logger.warning(
                                    'Target beam dataset {} does not have a corrected_data column! Using '
                                    'uncorrected data for conversion!'.format(
                                        beam))

                            cmd = exportuvfits_cmd.format(
                                vis=target_ms,
                                fits=target_fits,
                                beam=beam,
                                datacolumn=datacolumn)

                            lib.run_casa([cmd], timeout=7200)
                            if path.isfile(target_fits):
                                converttargetbeamsms2uvfits[int(beam)] = True
                                logger.debug(
                                    'Converted dataset of target beam '
                                    '{} from MS to UVFITS format!'.format(
                                        beam))
                            else:
                                converttargetbeamsms2uvfits[int(beam)] = False
                                logger.warning(
                                    'Could not convert dataset for target beam '
                                    '{} from MS to UVFITS format!'.format(
                                        beam))
                        else:
                            logger.warning(
                                'Dataset for target beam {} not available!'.
                                format(beam))
                    else:
                        logger.info(
                            'Dataset for target beam {} was already '
                            'converted from MS to UVFITS format'.format(beam))
            else:
                logger.warning(
                    'Target beam dataset(s) not specified. Cannot convert target beam datasets!'
                )
        else:
            logger.warning('Not converting target beam dataset(s)!')

        # Save the derived parameters for the MS to UVFITS conversion to the parameter file

        subs_param.add_param(self, 'convert_fluxcal_MS2UVFITS',
                             convertfluxcalms2uvfits)
        subs_param.add_param(self, 'convert_polcal_MS2UVFITS',
                             convertpolcalms2uvfits)
        subs_param.add_param(self, 'convert_targetbeams_MS2UVFITS',
                             converttargetbeamsms2uvfits)

        # Check which datasets are available in UVFITS format #
        if self.fluxcal != '':
            crosscal_fluxcal = mspath_to_fitspath(
                self.get_crosscalsubdir_path(), self.fluxcal)
            convertfluxcaluvfitsavailable = path.isfile(crosscal_fluxcal)
        else:
            logger.warning(
                'Flux calibrator dataset not specified. Cannot convert flux calibrator!'
            )
        if self.polcal != '':
            crosscal_polcal = mspath_to_fitspath(
                self.get_crosscalsubdir_path(), self.polcal)
            convertpolcaluvfitsavailable = path.isfile(crosscal_polcal)
        else:
            logger.warning(
                'Polarised calibrator dataset not specified. Cannot convert polarised calibrator!'
            )
        if self.target != '':
            for b in range(nbeams):
                b_formatted = str(b).zfill(2)
                converttargetbeamsuvfitsavailable[b] = path.isfile(
                    mspath_to_fitspath(
                        self.get_crosscalsubdir_path(b_formatted),
                        self.target))
        else:
            logger.warning(
                'Target beam dataset not specified. Cannot convert target beams!'
            )

        # Save the derived parameters for the availability to the parameter file

        subs_param.add_param(self, 'convert_fluxcal_UVFITSavailable',
                             convertfluxcaluvfitsavailable)
        subs_param.add_param(self, 'convert_polcal_UVFITSavailable',
                             convertpolcaluvfitsavailable)
        subs_param.add_param(self, 'convert_targetbeams_UVFITSavailable',
                             converttargetbeamsuvfitsavailable)

        # Convert the available UVFITS-datasets to MIRIAD format #

        # Convert the flux calibrator
        if self.convert_fluxcal:
            if self.fluxcal != '':
                if not convertfluxcaluvfits2miriad:
                    if convertfluxcaluvfitsavailable:
                        logger.debug(
                            'Converting flux calibrator dataset from UVFITS to MIRIAD format.'
                        )
                        subs_managefiles.director(
                            self,
                            'ch',
                            self.get_crosscalsubdir_path(),
                            verbose=False)
                        fits = lib.miriad('fits')
                        fits.op = 'uvin'
                        fits.in_ = mspath_to_fitspath(
                            self.get_crosscalsubdir_path(), self.fluxcal)
                        fits.out = mspath_to_fitspath(
                            self.get_crosscalsubdir_path(),
                            self.fluxcal,
                            ext='mir')
                        fits.go()
                        if path.isdir(fits.out):
                            convertfluxcaluvfits2miriad = True
                            logger.info(
                                'Converted flux calibrator dataset from UVFITS to MIRIAD format!'
                            )
                        else:
                            convertfluxcaluvfits2miriad = False
                            logger.warning(
                                'Could not convert flux calibrator dataset from UVFITS to MIRIAD format!'
                            )
                    else:
                        logger.warning(
                            'Flux calibrator dataset not available!')
                else:
                    logger.info(
                        'Flux calibrator dataset was already converted from UVFITS to MIRIAD format'
                    )
            else:
                logger.warning(
                    'Flux calibrator dataset not specified. Cannot convert flux calibrator!'
                )
        else:
            logger.warning('Not converting flux calibrator dataset!')
        # Convert the polarised calibrator
        if self.convert_polcal:
            if self.polcal != '':
                if not convertpolcaluvfits2miriad:
                    if convertpolcaluvfitsavailable:
                        logger.debug(
                            'Converting polarised calibrator dataset from UVFITS to MIRIAD format.'
                        )
                        subs_managefiles.director(
                            self,
                            'ch',
                            self.get_crosscalsubdir_path(),
                            verbose=False)
                        fits = lib.miriad('fits')
                        fits.op = 'uvin'
                        fits.in_ = mspath_to_fitspath(
                            self.get_crosscalsubdir_path(), self.polcal)
                        fits.out = mspath_to_fitspath(
                            self.get_crosscalsubdir_path(),
                            self.polcal,
                            ext='mir')
                        fits.go()
                        if path.isdir(fits.out):
                            convertpolcaluvfits2miriad = True
                            logger.info(
                                'Converted polarised calibrator dataset from UVFITS to MIRIAD format!'
                            )
                        else:
                            convertpolcaluvfits2miriad = False
                            logger.warning(
                                'Could not convert polarised calibrator dataset from UVFITS to MIRIAD format!'
                            )
                    else:
                        logger.warning(
                            'Polarised calibrator dataset not available!')
                else:
                    logger.info(
                        'Polarised calibrator dataset was already converted from UVFITS to MIRIAD format'
                    )
            else:
                logger.warning(
                    'Polarised calibrator dataset not specified. Cannot convert polarised calibrator!'
                )
        else:
            logger.warning('Not converting polarised calibrator dataset!')
        # Convert the target beams
        if self.convert_target:
            if self.target != '':
                logger.info(
                    'Converting target beam datasets from UVFITS to MIRIAD format.'
                )
                if self.convert_targetbeams == 'all':
                    datasets = glob.glob(
                        mspath_to_fitspath(
                            self.get_crosscalsubdir_path('[0-9][0-9]'),
                            self.get_target_path()))
                    logger.debug(
                        'Converting all available target beam datasets')
                else:
                    beams = self.convert_targetbeams.split(",")
                    datasets = [
                        mspath_to_fitspath(
                            self.get_crosscalsubdir_path(str(b).zfill(2)),
                            self.target) for b in beams
                    ]
                    logger.debug(
                        'Converting all selected target beam datasets')
                for vis in datasets:
                    beam = vis.split('/')[-3]
                    if not converttargetbeamsuvfits2miriad[int(beam)]:
                        if converttargetbeamsuvfitsavailable[int(beam)]:
                            subs_managefiles.director(
                                self,
                                'ch',
                                self.get_crosscalsubdir_path(beam),
                                verbose=False)
                            fits = lib.miriad('fits')
                            fits.op = 'uvin'
                            fits.in_ = mspath_to_fitspath(
                                self.get_crosscalsubdir_path(beam),
                                self.target)
                            fits.out = mspath_to_fitspath(
                                self.get_crosscalsubdir_path(beam),
                                self.target,
                                ext='mir')
                            fits.go()
                            if path.isdir(fits.out):
                                converttargetbeamsuvfits2miriad[int(
                                    beam)] = True
                                logger.debug(
                                    'Converted dataset of target beam {} from '
                                    'UVFITS to MIRIAD format!'.format(beam))
                            else:
                                converttargetbeamsuvfits2miriad[int(
                                    beam)] = False
                                logger.warning(
                                    'Could not convert dataset for target beam '
                                    '{} from UVFITS to MIRIAD format!'.format(
                                        beam))
                        else:
                            logger.warning(
                                'Dataset for target beam {} not available!'.
                                format(beam))
                    else:
                        logger.info(
                            'Dataset for target beam {} was already converted '
                            'from UVFITS to MIRIAD format'.format(beam))
            else:
                logger.warning(
                    'Target beam dataset(s) not specified. Cannot convert target beam datasets!'
                )
        else:
            logger.warning('Not converting target beam dataset(s)!')

        # Save the derived parameters for the MS to UVFITS conversion to the parameter file

        subs_param.add_param(self, 'convert_fluxcal_UVFITS2MIRIAD',
                             convertfluxcaluvfits2miriad)
        subs_param.add_param(self, 'convert_polcal_UVFITS2MIRIAD',
                             convertpolcaluvfits2miriad)
        subs_param.add_param(self, 'convert_targetbeams_UVFITS2MIRIAD',
                             converttargetbeamsuvfits2miriad)

        # Remove the UVFITS files if wanted #
        if self.convert_removeuvfits:
            logger.info('Removing all UVFITS files')
            subs_managefiles.director(
                self, 'rm',
                mspath_to_fitspath(self.get_crosscalsubdir_path(),
                                   self.fluxcal))
            subs_managefiles.director(
                self, 'rm',
                mspath_to_fitspath(self.get_crosscalsubdir_path(),
                                   self.polcal))
            for beam in range(nbeams):
                basedir = self.get_crosscalsubdir_path(str(beam).zfill(2))
                if path.isdir(basedir):
                    subs_managefiles.director(
                        self, 'rm', mspath_to_fitspath(basedir, self.target))
Beispiel #29
0
def start_apercal_pipeline(targets,
                           fluxcals,
                           polcals,
                           dry_run=False,
                           basedir=None,
                           flip_ra=False,
                           steps=None,
                           configfilename=None):
    """
    Trigger the start of an Apercal pipeline run. Returns when the pipeline is done.
    Example for taskid, name, beamnr: (190108926, '3C147_36', 36)
    Fluxcals and polcals can be specified in the wrong order; if the given polcal is not
    polarised, the two are swapped.
    If both polcals and fluxcals are set, they must have the same length.
    A list of config files can be provided, one for each beam. If a single config file
    is given, copies of it are created so that there is one config per beam. If no
    config file is given, the default one is used and copies for each beam are made.

    Args:
        targets (Tuple[int, str, List[int]]): taskid, name, list of beamnrs
        fluxcals (List[Tuple[int, str, int]]): fluxcals: taskid, name, beamnr
        polcals (List[Tuple[int, str, int]]): polcals: taskid, name, beamnr (can be None)
        dry_run (bool): interpret arguments, do not actually run the pipeline
        basedir (str): base directory; if not specified it will be /data/apertif/{target_taskid}
        flip_ra (bool): flip RA (for old measurement sets where beamweights were flipped)
        steps (List[str]): list of steps to perform
        configfilename (List[str]): custom config file(s) (should be full paths for now)

    Returns:
        Tuple[Dict[int, List[str]], str, Optional[str]]: a dict with beam numbers (ints) as keys
            and the list of failed steps as values, the formatted runtime, and possibly an
            exception message. 'Failed' is defined here as 'threw an exception', and only
            target steps are recorded. Please also read the logs.
    """
    if steps is None:
        steps = [
            "prepare", "split", "preflag", "ccal", "convert", "scal",
            "continuum", "polarisation", "line", "transfer"
        ]

    (taskid_target, name_target, beamlist_target) = targets

    # set the base directory if none was provided
    if not basedir:
        basedir = '/data/apertif/{}/'.format(taskid_target)
    elif len(basedir) > 0 and basedir[-1] != '/':
        basedir = basedir + '/'
    if not os.path.exists(basedir):
        os.mkdir(basedir)

    logfilepath = os.path.join(basedir, 'apercal.log')

    lib.setup_logger('debug', logfile=logfilepath)
    logger = logging.getLogger(__name__)
    gitinfo = subprocess.check_output('cd ' +
                                      os.path.dirname(apercal.__file__) +
                                      '&& git describe --tag; cd',
                                      shell=True).strip()
    logger.info("Apercal version: " + gitinfo)

    logger.info(
        "start_apercal called with arguments targets={}; fluxcals={}; polcals={}"
        .format(targets, fluxcals, polcals))
    logger.info("steps = {}".format(steps))

    # number of beams to process
    n_beams = len(beamlist_target)

    # check the input config file
    # get the default configfile if none was provided
    if not configfilename:
        logger.info("No config file provided, getting default config")
        # create a list of config file names
        configfilename_list = [
            os.path.join(
                basedir,
                "{0}_B{1}_Apercal_settings.cfg".format(taskid_target,
                                                       str(beam).zfill(2)))
            for beam in beamlist_target
        ]
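        # e.g. taskid_target=190409056, beam 1 ->
        #   <basedir>/190409056_B01_Apercal_settings.cfg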
        # get the default config settings
        config = lib.get_default_config()
        # go through the config files and create them
        for beam_index in range(n_beams):
            with open(configfilename_list[beam_index], "w") as fp:
                config.write(fp)
            logger.info("Beam {} config file saved to {}".format(
                beamlist_target[beam_index], configfilename_list[beam_index]))
    # if configfile(s) are given as a list
    elif type(configfilename) is list:
        # if it is just one, create copies for each beam in the base directory
        if len(configfilename) == 1:
            logger.info(
                "A single config file was provided. Creating copies of {}".
                format(configfilename[0]))
            configfilename_list = [
                os.path.join(
                    basedir, "{0}_B{1}_Apercal_settings.cfg".format(
                        taskid_target,
                        str(beam).zfill(2))) for beam in beamlist_target
            ]
            # make the copies
            for config in configfilename_list:
                lib.basher("cp " + str(configfilename[0]) + " " + str(config))
        elif len(configfilename) == n_beams:
            logger.info("Number of config files and target beams match.")
            configfilename_list = configfilename
        else:
            error = "Number of config files and target beams did not match. Abort"
            logger.error(error)
            raise RuntimeError(error)
    # if configfilename is just a string
    elif type(configfilename) is str:
        logger.info(
            "A single config file was provided. Creating copies of {}".format(
                configfilename))
        configfilename_list = [
            os.path.join(
                basedir,
                "{0}_B{1}_Apercal_settings.cfg".format(taskid_target,
                                                       str(beam).zfill(2)))
            for beam in beamlist_target
        ]
        # make the copies
        for config in configfilename_list:
            lib.basher("cp " + str(configfilename) + " " + str(config))
    else:
        error = "Unknown input for configfilename. Abort"
        logger.error(error)
        raise RuntimeError(error)

    status = pymp.shared.dict({beamnr: [] for beamnr in beamlist_target})
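    # 'status' is a pymp shared dict mapping beam number to a list of failed step names;
    # it is filled inside the parallel sections below and returned at the end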

    if fluxcals:
        name_fluxcal = str(fluxcals[0][1]).strip().split('_')[0].upper()
    else:
        name_fluxcal = ''
    if polcals:
        name_polcal = str(polcals[0][1]).strip().split('_')[0].upper()
    else:
        name_polcal = ''
    name_target = str(name_target).strip()  # .upper()

    # If both the fluxcal and the polcal are polarised, drop the polcal
    if subs_calmodels.is_polarised(
            name_polcal) and subs_calmodels.is_polarised(name_fluxcal):
        name_polcal = ""

    if (fluxcals and fluxcals != '') and (polcals and polcals != ''):
        assert (len(fluxcals) == len(polcals))

    # avoid symmetry bias, if there is only a polcal but no fluxcal, switch them
    if fluxcals is None and polcals is not None:
        logger.info(
            "Only polcal was provided. Setting polcal {} to fluxcal".format(
                name_polcal))
        fluxcals, polcals = polcals, fluxcals
        name_polcal = ""
    # Exchange polcal and fluxcal if specified in the wrong order
    elif not subs_calmodels.is_polarised(name_polcal) and name_polcal != '':
        if subs_calmodels.is_polarised(name_fluxcal):
            logger.info("Switching polcal and fluxcal because " + name_polcal +
                        " is not polarised")
            fluxcals, polcals = polcals, fluxcals
            name_polcal = str(polcals[0][1]).strip()
        else:
            logger.info("Setting polcal to '' since " + name_polcal +
                        " is not polarised")
            name_polcal = ""
    elif name_polcal != '':
        logger.info("Polcal " + name_polcal + " is polarised, all good")

    def name_to_ms(name):
        if not name:
            return ''
        elif '3C' in name:
            return name.upper().strip().split('_')[0] + '.MS'
        else:
            return name + '.MS'

    def name_to_mir(name):
        if not name:
            return ''
        elif '3C' in name:
            return name.upper().strip().split('_')[0] + '.mir'
        else:
            return name + '.mir'
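    # For illustration, based on the two helpers above:
    #   name_to_ms('3C147_36')  -> '3C147.MS',  name_to_mir('3C147_36') -> '3C147.mir'
    #   name_to_ms('NGC1234')   -> 'NGC1234.MS'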

    def set_files(p):
        """
        Set the basedir, fluxcal, polcal, target properties

        Args:
            p (BaseModule): apercal step object (e.g. prepare)

        Returns:
            None
        """

        p.basedir = basedir
        p.fluxcal = name_to_ms(name_fluxcal)
        p.polcal = name_to_ms(name_polcal)
        p.target = name_to_ms(name_target)

        # debug_msg = """
        # p.basedir = basedir = {0};
        # p.fluxcal = name_to_ms(name_fluxcal) = {1};
        # p.polcal = name_to_ms(name_polcal) = {2};
        # p.target = name_to_ms(name_target) = {3};
        # """.format(basedir, name_to_ms(name_fluxcal), name_to_ms(name_polcal), name_to_ms(name_target))
        # logger.debug(debug_msg)

    beamnrs_fluxcal = [f[2] for f in fluxcals]
    if len(fluxcals) > 1:
        # Check every target beam has a fluxcal beam
        for beamnr_target in beamlist_target:
            assert (beamnr_target in beamnrs_fluxcal)

    # create a copy of the target beam list as a plain Python list
    # to avoid using np.where() for such a small thing
    if type(beamlist_target) == np.ndarray:
        beamlist_target_for_config = beamlist_target.tolist()
    else:
        beamlist_target_for_config = beamlist_target

    time_start = time()
    try:
        # =======
        # Prepare
        # =======

        # keep a start-finish record of the step in the main log file
        if "prepare" in steps:
            logger.info("Running prepare")
            start_time_prepare = time()
        else:
            logger.info("Skipping prepare")

        # Prepare fluxcals
        for (taskid_fluxcal, name_fluxcal, beamnr_fluxcal) in fluxcals:
            p0 = prepare(file_=configfilename_list[
                beamlist_target_for_config.index(beamnr_fluxcal)])
            p0.basedir = basedir
            #set_files(p0)
            p0.prepare_flip_ra = flip_ra
            # the following two need to be empty strings for prepare
            p0.fluxcal = ''
            p0.polcal = ''
            p0.target = name_to_ms(name_fluxcal)
            p0.prepare_target_beams = str(beamnr_fluxcal)
            p0.prepare_date = str(taskid_fluxcal)[:6]
            p0.prepare_obsnum_target = validate_taskid(taskid_fluxcal)
            if "prepare" in steps and not dry_run:
                try:
                    p0.go()
                except Exception as e:
                    logger.warning("Prepare failed for fluxcal " +
                                   str(taskid_fluxcal) + " beam " +
                                   str(beamnr_fluxcal))
                    logger.exception(e)

        if 'prepare' in steps:
            # rename the param file generated here in order to keep it
            param_file = os.path.join(basedir, 'param.npy')
            director(p0,
                     'rn',
                     param_file.replace(
                         ".npy",
                         "_prepare_{}.npy".format(name_fluxcal.split('_')[0])),
                     file_=param_file,
                     ignore_nonexistent=True)

        # Prepare polcals
        if name_polcal != '':
            for (taskid_polcal, name_polcal, beamnr_polcal) in polcals:
                p0 = prepare(file_=configfilename_list[
                    beamlist_target_for_config.index(beamnr_polcal)])
                p0.basedir = basedir
                #set_files(p0)
                p0.prepare_flip_ra = flip_ra
                # the following two need to be empty strings for prepare
                p0.fluxcal = ''
                p0.polcal = ''
                p0.target = name_to_ms(name_polcal)
                p0.prepare_target_beams = str(beamnr_polcal)
                p0.prepare_date = str(taskid_polcal)[:6]
                p0.prepare_obsnum_target = validate_taskid(taskid_polcal)
                if "prepare" in steps and not dry_run:
                    try:
                        p0.go()
                    except Exception as e:
                        logger.warning("Prepare failed for polcal " +
                                       str(taskid_polcal) + " beam " +
                                       str(beamnr_polcal))
                        logger.exception(e)

            if 'prepare' in steps:
                # rename the param file generated here in order to keep it
                param_file = os.path.join(basedir, 'param.npy')
                director(p0,
                         'rn',
                         param_file.replace(
                             ".npy", "_prepare_{}.npy".format(
                                 name_polcal.split('_')[0])),
                         file_=param_file,
                         ignore_nonexistent=True)

        # Prepare target
        for beamnr in beamlist_target:
            p0 = prepare(file_=configfilename_list[
                beamlist_target_for_config.index(beamnr)])
            p0.basedir = basedir
            # set_files(p0)
            p0.prepare_flip_ra = flip_ra
            # the following two need to be empty strings for prepare
            p0.fluxcal = ''
            p0.polcal = ''
            p0.target = name_to_ms(name_target)
            p0.prepare_date = str(taskid_target)[:6]
            p0.prepare_obsnum_target = validate_taskid(taskid_target)
            p0.prepare_target_beams = ','.join(
                ['{:02d}'.format(beamnr) for beamnr in beamlist_target])
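            # e.g. beamlist_target = [0, 1, 2]  ->  prepare_target_beams = '00,01,02'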
            if "prepare" in steps and not dry_run:
                try:
                    p0.go()
                except Exception as e:
                    logger.warning("Prepare failed for target " +
                                   str(taskid_target) + " beam " + str(beamnr))
                    logger.exception(e)
                    status[beamnr] += ['prepare']

        # keep a start-finish record of the step in the main log file
        if "prepare" in steps:
            logger.info("Running prepare ... Done ({0:.0f}s)".format(
                time() - start_time_prepare))

            # rename the param file generated here in order to keep it
            param_file = os.path.join(basedir, 'param.npy')
            director(p0,
                     'rn',
                     param_file.replace(".npy",
                                        "_prepare_{}.npy".format(name_target)),
                     file_=param_file,
                     ignore_nonexistent=True)

        # =====
        # Split
        # =====

        # keep a start-finish record of the step in the main log file
        if 'split' in steps:
            logger.info("Running split")
            start_time_split = time()
        else:
            logger.info("Skipping split")

        # Split a small chunk of data for the quicklook pipeline.
        # At the moment this relies entirely on the target beams;
        # what if there are more calibrator beams than target beams -> is that realistic?
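        # pymp.Parallel forks worker processes (OpenMP-style); p.range distributes the
        # beam indices over the workers, and each worker logs to its own per-beam logfile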
        with pymp.Parallel(5) as p:
            for beam_index in p.range(n_beams):
                beamnr = beamlist_target[beam_index]

                # individual logfiles for each process
                logfilepath = os.path.join(basedir,
                                           'apercal{:02d}.log'.format(beamnr))
                lib.setup_logger('debug', logfile=logfilepath)
                logger = logging.getLogger(__name__)

                logger.debug("Starting logfile for beam " + str(beamnr))
                try:
                    s0 = split(file_=configfilename_list[beam_index])
                    set_files(s0)
                    s0.beam = "{:02d}".format(beamnr)
                    if "split" in steps and not dry_run:
                        s0.go()
                except Exception as e:
                    logger.warning("Split failed for {0} beam {1}".format(
                        str(taskid_target), str(beamnr)))
                    logger.exception(e)
                    # not sure if following line is necessary
                    status[beamnr] += ['split']

        # keep a start-finish record of the step in the main log file
        if "split" in steps:
            logfilepath = os.path.join(basedir, 'apercal.log')
            lib.setup_logger('debug', logfile=logfilepath)
            logger = logging.getLogger(__name__)
            logger.info(
                "Running split ... Done ({0:.0f}s)".format(time() -
                                                           start_time_split))

            # copy the param file generated here
            # param_file = os.path.join(basedir, 'param.npy')
            # director(
            #     p0, 'rn', param_file.replace(".npy", "_split.npy"), file_=param_file, ignore_nonexistent=True)

        # =======
        # Preflag
        # =======

        # keep a record of the parallelised step in the main log file
        if "preflag" in steps:
            logger.info("Running preflag")
            start_time_preflag = time()
        else:
            logger.info("Skipping preflag")

        # In order to run in parallel, the bandpass table needs to exist.
        # Doing it here is not elegant, but it requires the fewest changes
        # to preflag.
        # with pymp.Parallel(10) as p:
        #     for beam_index in p.range(n_beams):
        #         beamnr = beamlist_target[beam_index]
        #         # individual logfiles for each process
        #         logfilepath = os.path.join(
        #             basedir, 'apercal{:02d}.log'.format(beamnr))
        #         lib.setup_logger('debug', logfile=logfilepath)
        #         logger = logging.getLogger(__name__)

        #         logger.debug("Starting logfile for beam " + str(beamnr))
        #         p1 = preflag(filename=configfilename)
        #         p1.paramfilename = 'param_{:02d}.npy'.format(beamnr)
        #         p1.basedir = basedir
        #         p1.fluxcal = ''
        #         p1.polcal = ''
        #         p1.target = name_to_ms(name_fluxcal)

        #         p1.beam = "{:02d}".format(beamnr)
        #         p1.preflag_targetbeams = "{:02d}".format(beamnr)
        #         if "preflag" in steps and not dry_run:
        #             try:
        #                 bandpass_start_time = time()
        #                 logger.info("Running aoflagger bandpass for flux calibrator {0} in beam {1}".format(
        #                     p1.target, p1.beam))
        #                 # director(
        #                 #     p1, 'rm', basedir + '/param_{:02d}.npy'.format(beamnr), ignore_nonexistent=True)
        #                 p1.go()
        #                 # director(p1, 'rm', basedir + '/param.npy',
        #                 #         ignore_nonexistent=True)

        #                 # it is necessary to move the param files in order to keep them
        #                 param_file = basedir + \
        #                     '/param_{:02d}.npy'.format(beamnr)
        #                 director(
        #                     p1, 'mv', param_file, file_=param_file.replace(".npy", "_preflag_{0}.npy".format(name_fluxcal)), ignore_nonexistent=True)

        #                 p1.aoflagger_bandpass()
        #             except Exception as e:
        #                 logger.warning("Running aoflagger bandpass for flux calibrator {0} in beam {1} ... Failed ({2:.0f}s)".format(
        #                     p1.target, p1.beam, time() - bandpass_start_time))
        #                 logger.exception(e)
        #                 status[beamnr] += ['preflag_bandpass']
        #             else:
        #                 logger.info("Running aoflagger bandpass for flux calibrator {0} in beam {1} ... Done ({2:.0f}s)".format(
        #                     p1.target, p1.beam, time() - bandpass_start_time))

        # Flag fluxcal (pretending it's a target, parallelised version)
        # 5 in parallel
        with pymp.Parallel(5) as p:
            for beam_index in p.range(n_beams):
                beamnr = beamlist_target[beam_index]

                # individual logfiles for each process
                logfilepath = os.path.join(basedir,
                                           'apercal{:02d}.log'.format(beamnr))
                lib.setup_logger('debug', logfile=logfilepath)
                logger = logging.getLogger(__name__)

                logger.debug("Starting logfile for beam " + str(beamnr))

                try:
                    p1 = preflag(filename=configfilename_list[beam_index])
                    p1.paramfilename = 'param_{:02d}.npy'.format(beamnr)
                    p1.basedir = basedir
                    p1.fluxcal = ''
                    p1.polcal = ''
                    p1.target = name_to_ms(name_fluxcal)
                    p1.beam = "{:02d}".format(beamnr)
                    p1.preflag_targetbeams = "{:02d}".format(beamnr)
                    if beam_index < 2:
                        p1.preflag_aoflagger_threads = 9
                    else:
                        p1.preflag_aoflagger_threads = 10
                    if "preflag" in steps and not dry_run:
                        logger.info(
                            "Running preflag for flux calibrator {0} in beam {1}"
                            .format(p1.target, p1.beam))
                        preflag_flux_cal_start_time = time()
                        # director(
                        #     p1, 'rm', basedir + '/param_{:02d}.npy'.format(beamnr), ignore_nonexistent=True)
                        p1.go()

                        # it is necessary to move the param files in order to keep them
                        param_file = os.path.join(
                            basedir, 'param_{:02d}.npy'.format(beamnr))
                        director(p1,
                                 'rn',
                                 param_file.replace(
                                     ".npy", "_preflag_{0}.npy".format(
                                         name_fluxcal.split('_')[0])),
                                 file_=param_file,
                                 ignore_nonexistent=True)

                        logger.info(
                            "Running preflag for flux calibrator {0} in beam {1} ... Done ({2:.0f}s)"
                            .format(p1.target, p1.beam,
                                    time() - preflag_flux_cal_start_time))
                except Exception as e:
                    logger.warning(
                        "Running preflag for flux calibrator {0} in beam {1} ... Failed ({2:.0f}s)"
                        .format(p1.target, p1.beam,
                                time() - preflag_flux_cal_start_time))
                    logger.exception(e)
                    status[beamnr] += ['preflag']

        # Flag polcal (pretending it's a target, parallel version)
        # 5 in parallel
        with pymp.Parallel(5) as p:
            for beam_index in p.range(n_beams):
                beamnr = beamlist_target[beam_index]

                # individual logfiles for each process
                logfilepath = os.path.join(basedir,
                                           'apercal{:02d}.log'.format(beamnr))
                lib.setup_logger('debug', logfile=logfilepath)
                logger = logging.getLogger(__name__)

                logger.debug("Starting logfile for beam " + str(beamnr))

                try:
                    p1 = preflag(filename=configfilename_list[beam_index])
                    # remove next line in final version
                    p1.preflag_aoflagger_version = 'local'
                    p1.paramfilename = 'param_{:02d}.npy'.format(beamnr)
                    p1.basedir = basedir
                    if name_polcal != '':
                        p1.fluxcal = ''
                        p1.polcal = ''
                        p1.target = name_to_ms(name_polcal)
                        p1.beam = "{:02d}".format(beamnr)
                        p1.preflag_targetbeams = "{:02d}".format(beamnr)
                        if beam_index < 2:
                            p1.preflag_aoflagger_threads = 9
                        else:
                            p1.preflag_aoflagger_threads = 10
                        if "preflag" in steps and not dry_run:
                            logger.info(
                                "Running preflag for pol calibrator {0} in beam {1}"
                                .format(p1.target, p1.beam))
                            preflag_pol_cal_start_time = time()
                            # director(
                            #     p1, 'rm', basedir + '/param_{:02d}.npy'.format(beamnr), ignore_nonexistent=True)
                            p1.go()

                            # it is necessary to move the param files in order to keep them
                            param_file = os.path.join(
                                basedir, 'param_{:02d}.npy'.format(beamnr))
                            director(p1,
                                     'rn',
                                     param_file.replace(
                                         ".npy", "_preflag_{0}.npy".format(
                                             name_polcal.split('_')[0])),
                                     file_=param_file,
                                     ignore_nonexistent=True)

                            logger.info(
                                "Running preflag for pol calibrator {0} in beam {1} ... Done ({2:.0f}s)"
                                .format(p1.target, p1.beam,
                                        time() - preflag_pol_cal_start_time))
                except Exception as e:
                    logger.warning(
                        "Running preflag for pol calibrator {0} in beam {1} ... Failed ({2:.0f}s)"
                        .format(p1.target, p1.beam,
                                time() - preflag_pol_cal_start_time))
                    logger.exception(e)
                    status[beamnr] += ['preflag']

        # Flag target
        # 5 in parallel
        with pymp.Parallel(5) as p:
            for beam_index in p.range(n_beams):
                beamnr = beamlist_target[beam_index]

                # individual logfiles for each process
                logfilepath = os.path.join(basedir,
                                           'apercal{:02d}.log'.format(beamnr))
                lib.setup_logger('debug', logfile=logfilepath)
                logger = logging.getLogger(__name__)

                logger.debug("Starting logfile for beam " + str(beamnr))

                try:
                    p1 = preflag(filename=configfilename_list[beam_index])
                    # remove next line in final version
                    p1.preflag_aoflagger_version = 'local'
                    p1.paramfilename = 'param_{:02d}.npy'.format(beamnr)
                    p1.basedir = basedir
                    p1.fluxcal = ''
                    p1.polcal = ''
                    p1.target = name_to_ms(name_target)
                    p1.beam = "{:02d}".format(beamnr)
                    p1.preflag_targetbeams = "{:02d}".format(beamnr)
                    if beam_index < 2:
                        p1.preflag_aoflagger_threads = 9
                    else:
                        p1.preflag_aoflagger_threads = 10
                    if "preflag" in steps and not dry_run:
                        logger.info(
                            "Running preflag for target {0} in beam {1}".
                            format(p1.target, p1.beam))
                        preflag_target_start_time = time()
                        # director(
                        #     p1, 'rm', basedir + '/param_{:02d}.npy'.format(beamnr), ignore_nonexistent=True)
                        p1.go()

                        # it is necessary to move the param files in order to keep them
                        param_file = os.path.join(
                            basedir, 'param_{:02d}.npy'.format(beamnr))
                        director(p1,
                                 'rn',
                                 param_file.replace(
                                     ".npy",
                                     "_preflag_{0}.npy".format(name_target)),
                                 file_=param_file,
                                 ignore_nonexistent=True)

                        logger.info(
                            "Running preflag for target {0} in beam {1} ... Done ({2:.0f}s)"
                            .format(p1.target, p1.beam,
                                    time() - preflag_target_start_time))
                except Exception as e:
                    logger.info(
                        "Running preflag for target {0} in beam {1} ... Failed ({2:.0f}s)"
                        .format(p1.target, p1.beam,
                                time() - preflag_target_start_time))
                    logger.exception(e)
                    status[beamnr] += ['preflag']

        # keep a record of the parallelised step in the main log file
        if "preflag" in steps:
            logfilepath = os.path.join(basedir, 'apercal.log')
            lib.setup_logger('debug', logfile=logfilepath)
            logger = logging.getLogger(__name__)

            logger.info("Running preflag ... Done ({0:.0f}s)".format(
                time() - start_time_preflag))

        # ===============
        # Crosscal
        # ===============

        # keep a record of the parallelised step in the main log file
        if 'ccal' in steps:
            logfilepath = os.path.join(basedir, 'apercal.log')
            lib.setup_logger('debug', logfile=logfilepath)
            logger = logging.getLogger(__name__)

            logger.info("Running crosscal")
            start_time_crosscal = time()
        else:
            logfilepath = os.path.join(basedir, 'apercal.log')
            lib.setup_logger('debug', logfile=logfilepath)
            logger = logging.getLogger(__name__)

            logger.info("Skipping crosscal")

        if len(fluxcals) == 1 and fluxcals[0][-1] == 0 and n_beams > 1:
            raise ApercalException(
                "Sorry, one fluxcal is not supported anymore at the moment")

        with pymp.Parallel(10) as p:
            for beam_index in p.range(n_beams):

                beamnr = beamlist_target[beam_index]
                logfilepath = os.path.join(basedir,
                                           'apercal{:02d}.log'.format(beamnr))
                lib.setup_logger('debug', logfile=logfilepath)
                logger = logging.getLogger(__name__)

                logger.debug("Starting logfile for beam " + str(beamnr))
                try:
                    p2 = ccal(file_=configfilename_list[beam_index])
                    p2.paramfilename = 'param_{:02d}.npy'.format(beamnr)
                    set_files(p2)
                    p2.beam = "{:02d}".format(beamnr)
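                    # limit the transfer of the calibration solutions to this beam's target data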
                    p2.crosscal_transfer_to_target_targetbeams = "{:02d}".format(
                        beamnr)
                    if "ccal" in steps and not dry_run:
                        # director(
                        #     p2, 'rm', basedir + '/param_{:02d}.npy'.format(beamnr), ignore_nonexistent=True)
                        p2.go()
                        # it is necessary to move the param files in order to keep them
                        param_file = os.path.join(
                            basedir, 'param_{:02d}.npy'.format(beamnr))
                        director(p2,
                                 'rn',
                                 param_file.replace(".npy", "_crosscal.npy"),
                                 file_=param_file,
                                 ignore_nonexistent=True)
                except Exception as e:
                    # Exception was already logged just before
                    logger.warning(
                        "Failed beam {}, skipping that from crosscal".format(
                            beamnr))
                    logger.exception(e)
                    status[beamnr] += ['crosscal']

        # keep a record of the parallelised step in the main log file
        if 'ccal' in steps:
            logfilepath = os.path.join(basedir, 'apercal.log')
            lib.setup_logger('debug', logfile=logfilepath)
            logger = logging.getLogger(__name__)

            logger.info("Running crosscal ... Done ({0:.0f}s)".format(
                time() - start_time_crosscal))

        # =======
        # Convert
        # =======

        # keep a record of the parallelised step in the main log file
        if 'convert' in steps:
            logfilepath = os.path.join(basedir, 'apercal.log')
            lib.setup_logger('debug', logfile=logfilepath)
            logger = logging.getLogger(__name__)

            logger.info("Running convert")
            start_time_convert = time()
        else:
            logfilepath = os.path.join(basedir, 'apercal.log')
            lib.setup_logger('debug', logfile=logfilepath)
            logger = logging.getLogger(__name__)

            logger.info("Skipping convert")

        # 5 threads so as not to hammer the disks too much; convert is purely I/O
        with pymp.Parallel(5) as p:
            for beam_index in p.range(n_beams):
                beamnr = beamlist_target[beam_index]

                logfilepath = os.path.join(basedir,
                                           'apercal{:02d}.log'.format(beamnr))
                lib.setup_logger('debug', logfile=logfilepath)
                logger = logging.getLogger(__name__)

                try:
                    p3 = convert(file_=configfilename_list[beam_index])
                    p3.paramfilename = 'param_{:02d}.npy'.format(beamnr)
                    set_files(p3)
                    p3.beam = "{:02d}".format(beamnr)
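                    # restrict the conversion to this beam's target dataset in this worker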
                    p3.convert_targetbeams = "{:02d}".format(beamnr)
                    if "convert" in steps and not dry_run:
                        # director(
                        #     p3, 'rm', basedir + '/param_{:02d}.npy'.format(beamnr), ignore_nonexistent=True)
                        p3.go()

                        # it is necessary to move the param files in order to keep them
                        param_file = os.path.join(
                            basedir, 'param_{:02d}.npy'.format(beamnr))
                        director(p3,
                                 'rn',
                                 param_file.replace(".npy", "_convert.npy"),
                                 file_=param_file,
                                 ignore_nonexistent=True)
                        # director(
                        #     p3, 'rm', basedir + '/param_{:02d}.npy'.format(beamnr), ignore_nonexistent=True)
                except Exception as e:
                    logger.warning(
                        "Failed beam {}, skipping that from convert".format(
                            beamnr))
                    logger.exception(e)
                    status[beamnr] += ['convert']

        if 'convert' in steps:
            # keep a record of the parallelised step in the main log file
            logfilepath = os.path.join(basedir, 'apercal.log')
            lib.setup_logger('debug', logfile=logfilepath)
            logger = logging.getLogger(__name__)

            logger.info("Running convert ... Done ({0:.0f}s)".format(
                time() - start_time_convert))

        # ==================================
        # Selfcal + Continuum + Polarisation
        # ==================================

        # keep a record of the parallelised step in the main log file
        if 'scal' in steps or 'continuum' in steps or 'polarisation' in steps:
            logfilepath = os.path.join(basedir, 'apercal.log')
            lib.setup_logger('debug', logfile=logfilepath)
            logger = logging.getLogger(__name__)

            logger.info("Running selfcal and/or continuum and/or polarisation")
            start_time_selfcal_continuum_polarisation = time()
        else:
            logfilepath = os.path.join(basedir, 'apercal.log')
            lib.setup_logger('debug', logfile=logfilepath)
            logger = logging.getLogger(__name__)

            logger.info("Skipping selfcal and continuum and polarisation")

        with pymp.Parallel(10) as p:
            for beam_index in p.range(n_beams):
                beamnr = beamlist_target[beam_index]

                logfilepath = os.path.join(basedir,
                                           'apercal{:02d}.log'.format(beamnr))
                lib.setup_logger('debug', logfile=logfilepath)
                logger = logging.getLogger(__name__)

                try:
                    p4 = scal(file_=configfilename_list[beam_index])
                    p4.paramfilename = 'param_{:02d}.npy'.format(beamnr)
                    p4.basedir = basedir
                    p4.beam = "{:02d}".format(beamnr)
                    p4.target = name_target + '.mir'
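                    # selfcal runs on the MIRIAD dataset produced by the convert step above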
                    if "scal" in steps and not dry_run:
                        p4.go()
                except Exception as e:
                    # Exception was already logged just before
                    logger.warning(
                        "Failed beam {}, skipping that from scal".format(
                            beamnr))
                    logger.exception(e)
                    status[beamnr] += ['scal']

                try:
                    p5 = continuum(file_=configfilename_list[beam_index])
                    p5.paramfilename = 'param_{:02d}.npy'.format(beamnr)
                    p5.basedir = basedir
                    p5.beam = "{:02d}".format(beamnr)
                    p5.target = name_target + '.mir'
                    if "continuum" in steps and not dry_run:
                        p5.go()
                except Exception as e:
                    # Exception was already logged just before
                    logger.warning(
                        "Failed beam {}, skipping that from continuum".format(
                            beamnr))
                    logger.exception(e)
                    status[beamnr] += ['continuum']

                try:
                    p6 = polarisation(file_=configfilename_list[beam_index])
                    p6.paramfilename = 'param_{:02d}.npy'.format(beamnr)
                    p6.basedir = basedir
                    p6.beam = "{:02d}".format(beamnr)
                    p6.polcal = name_to_mir(name_polcal)
                    p6.target = name_to_mir(name_target)
                    if "polarisation" in steps and not dry_run:
                        p6.go()
                except Exception as e:
                    # Exception was already logged just before
                    logger.warning(
                        "Failed beam {}, skipping that from polarisation".
                        format(beamnr))
                    logger.exception(e)
                    status[beamnr] += ['polarisation']

        # keep a record of the parallelised step in the main log file
        if 'scal' in steps or 'continuum' in steps or 'polarisation' in steps:
            logfilepath = os.path.join(basedir, 'apercal.log')
            lib.setup_logger('debug', logfile=logfilepath)
            logger = logging.getLogger(__name__)

            logger.info(
                "Running selfcal and/or continuum and/or polarisation ... Done ({0:.0f}s)"
                .format(time() - start_time_selfcal_continuum_polarisation))

        # ====
        # Line
        # ====

        # keep a record of the parallelised step in the main log file
        if 'line' in steps:
            logfilepath = os.path.join(basedir, 'apercal.log')
            lib.setup_logger('debug', logfile=logfilepath)
            logger.info("Running line")
            start_time_line = time()
        else:
            logfilepath = os.path.join(basedir, 'apercal.log')
            lib.setup_logger('debug', logfile=logfilepath)
            logger.info("Skipping line")

        for beamnr in beamlist_target:

            # Because of the amount of information coming from line
            # this module gets its own logfile
            logfilepath = os.path.join(basedir,
                                       'apercal{:02d}_line.log'.format(beamnr))
            lib.setup_logger('debug', logfile=logfilepath)
            try:
                p7 = line(file_=configfilename_list[
                    beamlist_target_for_config.index(beamnr)])
                if beamnr not in p7.line_beams:
                    logger.debug(
                        "Skipping line imaging for beam {}".format(beamnr))
                    continue
                p7.basedir = basedir
                p7.beam = "{:02d}".format(beamnr)
                p7.target = name_target + '.mir'
                if "line" in steps and not dry_run:
                    p7.go()
            except Exception as e:
                # Exception was already logged just before
                logger.warning(
                    "Failed beam {}, skipping that from line".format(beamnr))
                logger.exception(e)
                status[beamnr] += ['line']

        # with pymp.Parallel(5) as p:
        #     for beam_index in p.range(n_beams):
        #         beamnr = beamlist_target[beam_index]

        #         logfilepath = os.path.join(
        #             basedir, 'apercal{:02d}.log'.format(beamnr))
        #         lib.setup_logger('debug', logfile=logfilepath)
        #         logger = logging.getLogger(__name__)

        #         try:
        #             p7 = line(file_=configfilename)
        #             if beamnr not in p7.line_beams:
        #                 logger.debug(
        #                     "Skipping line imaging for beam {}".format(beamnr))
        #                 continue
        #             p7.basedir = basedir
        #             p7.beam = "{:02d}".format(beamnr)
        #             p7.target = name_target + '.mir'
        #             if "line" in steps and not dry_run:
        #                 p7.go()
        #         except Exception as e:
        #             # Exception was already logged just before
        #             logger.warning(
        #                 "Failed beam {}, skipping that from line".format(beamnr))
        #             logger.exception(e)
        #             status[beamnr] += ['line']

        # keep a record of the parallelised step in the main log file
        if 'line' in steps:
            logfilepath = os.path.join(basedir, 'apercal.log')
            lib.setup_logger('debug', logfile=logfilepath)
            logger = logging.getLogger(__name__)

            logger.info(
                "Running line ... Done ({0:.0f}s)".format(time() -
                                                          start_time_line))

        # ========
        # Transfer
        # ========

        # keep a record of the parallelised step in the main log file
        if 'transfer' in steps:
            logfilepath = os.path.join(basedir, 'apercal.log')
            lib.setup_logger('debug', logfile=logfilepath)
            logger.info("Running transfer")
            start_time_transfer = time()
        else:
            logfilepath = os.path.join(basedir, 'apercal.log')
            lib.setup_logger('debug', logfile=logfilepath)
            logger.info("Skipping transfer")

        # 5 threads to not hammer the disks too much during copying
        with pymp.Parallel(5) as p:
            for beam_index in p.range(n_beams):
                beamnr = beamlist_target[beam_index]

                logfilepath = os.path.join(basedir,
                                           'apercal{:02d}.log'.format(beamnr))
                lib.setup_logger('debug', logfile=logfilepath)
                logger = logging.getLogger(__name__)

                try:
                    p8 = transfer(file_=configfilename_list[beam_index])
                    p8.paramfilename = 'param_{:02d}.npy'.format(beamnr)
                    p8.basedir = basedir
                    p8.target = name_target + '.mir'
                    p8.beam = "{:02d}".format(beamnr)
                    if "transfer" in steps and not dry_run:
                        # director(
                        #     p8, 'rm', basedir + '/param_{:02d}.npy'.format(beamnr), ignore_nonexistent=True)
                        p8.go()
                except Exception as e:
                    logger.warning(
                        "Failed beam {}, skipping that from transfer".format(
                            beamnr))
                    logger.exception(e)
                    status[beamnr] += ['transfer']

        # keep a record of the parallelised step in the main log file
        if 'transfer' in steps:
            logfilepath = os.path.join(basedir, 'apercal.log')
            lib.setup_logger('debug', logfile=logfilepath)
            logger = logging.getLogger(__name__)

            logger.info("Running transfer ... Done ({0:.0f}s)".format(
                time() - start_time_transfer))

        # Polarisation
        # ============
        # keep a record of the parallelised step in the main log file
        # if 'polarisation' in steps:
        #     logfilepath = os.path.join(basedir, 'apercal.log')
        #     lib.setup_logger('debug', logfile=logfilepath)
        #     logger = logging.getLogger(__name__)

        #     logger.info("Running polarisation")
        #     start_time_polarisation = time()
        # else:
        #     logfilepath = os.path.join(basedir, 'apercal.log')
        #     lib.setup_logger('debug', logfile=logfilepath)
        #     logger = logging.getLogger(__name__)

        #     logger.info("Skipping polarisation")

        # with pymp.Parallel(5) as p:
        #     for beam_index in p.range(n_beams):
        #         beamnr = beamlist_target[beam_index]

        #         logfilepath = os.path.join(
        #             basedir, 'apercal{:02d}.log'.format(beamnr))
        #         lib.setup_logger('debug', logfile=logfilepath)
        #         logger = logging.getLogger(__name__)

        #         try:
        #             p7 = polarisation(file_=configfilename)
        #             p7.paramfilename = 'param_{:02d}.npy'.format(beamnr)
        #             p7.basedir = basedir
        #             p7.beam = "{:02d}".format(beamnr)
        #             p7.target = name_to_mir(name_target)
        #             if "polarisation" in steps and not dry_run:
        #                 p7.go()
        #         except Exception as e:
        #             # Exception was already logged just before
        #             logger.warning(
        #                 "Failed beam {}, skipping that from polarisation".format(beamnr))
        #             logger.exception(e)
        #             status[beamnr] += ['polarisation']

        # # keep a record of the parallelised step in the main log file
        # if 'polarisation' in steps:
        #     logfilepath = os.path.join(basedir, 'apercal.log')
        #     lib.setup_logger('debug', logfile=logfilepath)
        #     logger = logging.getLogger(__name__)

        #     logger.info("Running polarisation ... Done ({0:.0f}s)".format(
        #         time() - start_time_polarisation))

        # if "ccalqa" in steps and not dry_run:
        #     logger.info("Starting crosscal QA plots")
        #     try:
        #         make_all_ccal_plots(
        #             taskid_target, name_fluxcal.upper().strip().split('_')[0])
        #     except Exception as e:
        #         logger.warning("Failed crosscal QA plots")
        #         logger.exception(e)
        #     logger.info("Done with crosscal QA plots")

        status = status.copy()  # Convert pymp shared dict to a normal one
        msg = "Apercal finished after " + \
            str(timedelta(seconds=time() - time_start))
        logger.info(msg)
        return status, str(timedelta(seconds=time() - time_start)), None
    except Exception as e:
        msg = "Apercal threw an error after " + \
            str(timedelta(seconds=time() - time_start))
        logger.exception(msg)
        return status, str(timedelta(seconds=time() - time_start)), str(e)
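
# A hedged usage sketch (not part of the original module): all taskids, source names and
# beam numbers below are hypothetical placeholders and only illustrate the calling
# convention documented in the docstring of start_apercal_pipeline above.
if __name__ == "__main__":
    # target observation: taskid, source name, list of beam numbers
    example_targets = (190409056, 'NGC1234', [0, 1, 2])
    # one fluxcal and one polcal scan per target beam (taskid, name, beamnr)
    example_fluxcals = [(190409001, '3C147_0', 0),
                        (190409002, '3C147_1', 1),
                        (190409003, '3C147_2', 2)]
    example_polcals = [(190409011, '3C286_0', 0),
                       (190409012, '3C286_1', 1),
                       (190409013, '3C286_2', 2)]
    # dry_run=True only interprets the arguments without running the pipeline steps
    failed_steps, runtime, error = start_apercal_pipeline(
        example_targets, example_fluxcals, example_polcals, dry_run=True)
    print(failed_steps, runtime, error)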
Beispiel #30
0
def apercc(cal_list=None,
           task_id=None,
           cal_name=None,
           base_dir=None,
           search_all_nodes=False,
           steps=None):
    """
    Main function to run the cross-calibration stability evaluation.

    For a list of calibrator scans, or for a given task id and calibrator name,
    this function runs the cross-calibration evaluation.
    The function can get the data from ALTA and flag it using the Apercal modules
    prepare and preflag. It compares the bandpass solutions and gain factors between beams
    and between observations of the same calibrators.
    The different steps can be selected individually.

    Example:
        scan id, source name, beam: [190108926, '3C147_36', 36]
        steps: ['prepare', 'preflag', 'crosscal', 'bpass_compare', 'gain_compare', 'bpass_compare_obs', 'gain_compare_obs']
        function call: apercc(cal_list=[[190108926, '3C147_36', 36], [190108927, '3C147_37', 37]]) or apercc(task_id=190409056, cal_name='3C196')

    Args:
        cal_list (List[List[int, str, int]]): scan id, source name, beam (optional)
        base_dir (str): Name of the directory to store the data in;
            if not specified it will be /data/apertif/crosscal/<scanid> when new data is fetched
            or /data/apertif/<scanid> when existing data is used
        task_id (int): ID of the scan to be used as the main ID and for the directory;
            if not specified the first scan id will be used
        cal_name (str): Name of the calibrator;
            if not specified the first name in the calibrator list will be used
        search_all_nodes (bool):
        steps (List[str]): List of steps to run in this task

    To Do: Use existing data via the task_id option and the name of the calibrator?

    """

    # General setup
    # =============

    # start time of this function
    start_time = time()

    # check input
    # if no list of a calibrators is given
    cal_list_mode = True
    if cal_list is None:
        # then it needs the task id and the calibrator name to look for existing data
        if task_id is not None and cal_name is not None:
            print(
                "Using task id and calibrator name. Assuming to use existing data. Will not run preflag, prepare and crosscal"
            )
            # check that if steps were provided, they don't contain preflag, prepare and crosscal
            if steps is not None:
                if 'prepare' in steps:
                    steps.remove('prepare')
                if 'preflag' in steps:
                    steps.remove('preflag')
                if 'crosscal' in steps:
                    steps.remove('crosscal')
            else:
                steps = [
                    'bpass_compare', 'gain_compare', 'bpass_compare_obs',
                    'gain_compare_obs'
                ]
            # using existing data
            cal_list_mode = False
        # otherwise it won't do anything
        else:
            print(
                "Input parameters incorrect. Please specify either cal_list or task_id and cal_name. Abort"
            )
            return -1
    else:
        print("Using list of calibrators")
        if not steps:
            steps = [
                'prepare', 'preflag', 'crosscal', 'bpass_compare',
                'gain_compare', 'bpass_compare_obs', 'gain_compare_obs'
            ]

    # # check that preflag is in it if prepare is run
    # else:
    #     if 'prepare' in steps and not 'preflag' in steps:
    #         steps.insert(1, 'preflag')

    # get the scan id to be used as the task id
    if not task_id:
        task_id = cal_list[0][0]

    # create the data directory, named after the first scan id, unless otherwise specified
    # if no directory is specified
    if not base_dir:
        # and existing data is used, assume the base dir is an apercal processed data directory
        if task_id is not None and cal_name is not None:
            base_dir = '/data/apertif/{}/'.format(task_id)
        # if new data is fetched from the archive use a different default directory
        else:
            base_dir = '/data/apertif/crosscal/{}/'.format(task_id)
    elif len(base_dir) > 0 and base_dir[-1] != '/':
        base_dir = base_dir + '/'
    if not os.path.exists(base_dir) and cal_list_mode:
        try:
            os.mkdir(base_dir)
        except Exception as e:
            print("Creating the base directory failed. Abort")
            return -1
    elif not os.path.exists(base_dir):
        print("Directory was not found. Abort")
        return -1

    logfilepath = os.path.join(base_dir, 'apercc.log')

    lib.setup_logger('debug', logfile=logfilepath)
    logger = logging.getLogger(__name__)
    # gitinfo = subprocess.check_output('cd ' + os.path.dirname(apercal.__file__) +
    #                                   '&& git describe --tag; cd', shell=True).strip()

    # logger.info("Apercal version: " + gitinfo)

    logger.info("Apertif cross-calibration stability evaluation")

    if cal_list_mode:
        logger.info("Using list of calibrators as input !!!!")
    else:
        logger.info("Using task id and calibrator name as input !!!!")

    logger.debug("apercc called with arguments ...")
    logger.debug("cal_list={}".format(cal_list))
    logger.debug("task_id = {}".format(task_id))
    logger.debug("cal_name = {}".format(cal_name))
    logger.debug("base_dir = {}".format(base_dir))
    logger.debug("search_all_nodes = {}".format(search_all_nodes))
    logger.debug("steps = {}".format(steps))

    # number of calibrators
    if cal_list is not None:
        n_cals = len(cal_list)
        # get a list of beams
        beam_list = np.array([cal_list[k][2] for k in range(n_cals)])
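        # e.g. cal_list = [[190108926, '3C147_36', 36], [190108927, '3C147_37', 37]]
        #   -> beam_list = array([36, 37])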
    else:
        n_cals = 1

    # get the name of the calibrator
    if cal_name is None:
        name_cal = str(cal_list[0][1]).strip().split('_')[0]
    else:
        name_cal = cal_name

    # Getting the data using prepare
    # ==============================

    if "prepare" in steps:

        start_time_prepare = time()

        logger.info("Getting data for calibrators")

        # go through the list of calibrators and run prepare
        for (task_id_cal, name_cal, beamnr_cal) in cal_list:
            logger.info("Running prepare for {0} of beam {1}".format(
                name_cal, beamnr_cal))
            # create prepare object without config file
            prep = prepare(filename=None)
            # where to store the data
            prep.basedir = base_dir
            # give the calibrator as a target to prepare
            prep.fluxcal = ''
            prep.polcal = ''
            prep.target = name_cal.upper().strip().split('_')[0] + '.MS'
            prep.prepare_target_beams = str(beamnr_cal)
            prep.prepare_date = str(task_id_cal)[:6]
            prep.prepare_obsnum_target = str(task_id_cal)[-3:]
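            # the scan id is assumed to follow the pattern YYMMDDNNN: the first six digits
            # give the observing date, the last three the observation number
            # (e.g. 190108926 -> date '190108', obsnum '926')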
            try:
                prep.go()
            except Exception as e:
                logger.warning(
                    "Prepare failed for calibrator {0} ({1}) beam {2}".format(
                        str(task_id), name_cal, beamnr_cal))
                logger.exception(e)
            else:
                logger.info("Prepare successful for {0} of beam {1}".format(
                    name_cal, beamnr_cal))

        logger.info("Getting data for calibrators ... Done ({0:.0f}s)".format(
            time() - start_time_prepare))
    else:
        logger.info("Skipping getting data for calibrators")

    # Running preflag for calibrators
    # ===============================

    if 'preflag' in steps:
        start_time_flag = time()

        logger.info("Flagging data of calibrators")

        # Flag the calibrator (pretending it's a target)
        # TODO: turn this into a loop over beam_list and use parallel preflag
        flag = preflag(filename=None)
        flag.basedir = base_dir
        flag.fluxcal = ''
        flag.polcal = ''
        flag.target = name_cal.upper().strip().split('_')[0] + '.MS'
        flag.beam = "{:02d}".format(beam_list[0])
        flag.preflag_targetbeams = "{:02d}".format(beam_list[0])
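        # remove a possibly stale parameter file first, so preflag does not
        # skip steps based on bookkeeping from a previous run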
        try:
            director(flag,
                     'rm',
                     base_dir + '/param.npy',
                     ignore_nonexistent=True)
            flag.go()
        except Exception as e:
            logger.warning("Preflag failed")
            logger.exception(e)
        else:
            logger.info(
                "Flagging data of calibrators ... Done ({0:.0f}s)".format(
                    time() - start_time_flag))
    else:
        logger.info("Skipping running preflag for calibrators")

    # Running crosscal for calibrators
    # ===============================

    if 'crosscal' in steps:
        start_time_crosscal = time()

        logger.info("Running crosscal for calibrators")

        for beam_nr in beam_list:
            logger.info("Running crosscal for beam {0}".format(beam_nr))
            crosscal = ccal(file_=None)
            crosscal.basedir = base_dir
            crosscal.fluxcal = name_cal.upper().strip().split('_')[0] + '.MS'
            # p.polcal = name_to_ms(name_polcal)
            # p.target = name_to_ms(name_target)
            # p2.paramfilename = 'param_{:02d}.npy'.format(beamnr)
            crosscal.beam = "{:02d}".format(beam_nr)
            crosscal.crosscal_transfer_to_target = False
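            # only the calibrator solutions are needed for the stability
            # evaluation, so do not transfer them to a target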
            # p2.crosscal_transfer_to_target_targetbeams = "{:02d}".format(
            #    beamnr)
            try:
                director(crosscal,
                         'rm',
                         base_dir + '/param.npy',
                         ignore_nonexistent=True)
                # director(
                #     p2, 'rm', basedir + '/param_{:02d}.npy'.format(beamnr), ignore_nonexistent=True)
                crosscal.go()
            except Exception as e:
                # Exception was already logged just before
                logger.warning("Failed crosscal for beam {}".format(beam_nr))
                logger.exception(e)
            else:
                logger.info(
                    "Running crosscal for beam {0} ... Done".format(beam_nr))

        logger.info(
            "Running crosscal for calibrators ... Done ({0:.0f}s)".format(
                time() - start_time_crosscal))
    else:
        logger.info("Skipping running crosscal for calibrators")

    # Running bandpass comparison
    # ============================

    if 'bpass_compare' in steps:

        start_time_bpass = time()

        logger.info("Comparing bandpass")

        logger.info("#### Doing nothing here yet ####")

        logger.info(
            "Comparing bandpass ... Done ({0:.0f}s)".format(
                time() - start_time_bpass))
    else:
        logger.info("Skipping comparing bandpass")

    # Running gain comparison
    # ========================

    if 'gain_compare' in steps:

        start_time_gain = time()

        logger.info("Comparing gain solutions")

        logger.info("#### Doing nothing here yet ####")

        logger.info("Comparing gain solutions ... Done ({0:.0f})".format(
            time() - start_time_gain))
    else:
        logger.info("Skipping comparing gain solutions")

    # Running bandpass comparison between observations
    # =================================================
    if 'bpass_compare_obs' in steps:

        start_time_bandpass = time()

        logger.info("Comparing banpdass solutions across observations")

        logger.info("#### Doing nothing here yet ####")

        logger.info(
            "Comparing bandpass solutions across observations ... Done ({0:.0f}s)"
            .format(time() - start_time_bandpass))
    else:
        logger.info(
            "Skipping comparing bandpass solutions across observations")

    logger.info(
        "Apertif cross-calibration stability evaluation ... Done ({0:.0f}s)".
        format(time() - start_time))